code (string, 4–4.48k chars) | docstring (string, 1–6.45k chars) | _id (string, 24 chars) |
---|---|---|
class TestRead(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.test_dir = os.path.dirname(__file__) <NEW_LINE> <DEDENT> def test_normalized_intensities(self): <NEW_LINE> <INDENT> for root, dirs, files in os.walk(os.path.join(self.test_dir, 'files')): <NEW_LINE> <INDENT> for filename in files: <NEW_LINE> <INDENT> if filename.endswith('.bin'): <NEW_LINE> <INDENT> d = Dataset(os.path.join(root, filename)) <NEW_LINE> d.parse() <NEW_LINE> self.assertLessEqual(np.nanmax(d.intensity_image), np.float64(1)) <NEW_LINE> self.assertGreaterEqual(np.nanmin(d.intensity_image), np.float64(0)) | Test basic reading of binary files
| 6259904807f4c71912bb07c4 |
class HumanPlayer(Player): <NEW_LINE> <INDENT> def __init__(self, state_dim, action_dim): <NEW_LINE> <INDENT> super().__init__(state_dim, action_dim) <NEW_LINE> <DEDENT> def act(self, state): <NEW_LINE> <INDENT> action = input('Enter action: ') <NEW_LINE> action = int(action) <NEW_LINE> action -= 1 <NEW_LINE> if action <= 2: <NEW_LINE> <INDENT> action+=6 <NEW_LINE> <DEDENT> elif action >=6: <NEW_LINE> <INDENT> action-=6 <NEW_LINE> <DEDENT> return int(action) | This class implements Player abstract class
such that it allows a human user to perform
a desired action using console input | 625990488da39b475be04582 |
class C2600(Router): <NEW_LINE> <INDENT> chassis_to_default_adapter = {"2610": "C2600-MB-1E", "2611": "C2600-MB-2E", "2620": "C2600-MB-1FE", "2621": "C2600-MB-2FE", "2610XM": "C2600-MB-1FE", "2611XM": "C2600-MB-2FE", "2620XM": "C2600-MB-1FE", "2621XM": "C2600-MB-2FE", "2650XM": "C2600-MB-1FE", "2651XM": "C2600-MB-2FE"} <NEW_LINE> def __init__(self, module, server, chassis="2610"): <NEW_LINE> <INDENT> Router.__init__(self, module, server, platform="c2600") <NEW_LINE> self._platform_settings = {"ram": 128, "nvram": 128, "disk0": 0, "disk1": 0, "chassis": chassis, "iomem": 15, "clock_divisor": 8} <NEW_LINE> self._platform_settings["slot0"] = self.chassis_to_default_adapter[chassis] <NEW_LINE> self._settings.update(self._platform_settings) <NEW_LINE> self._defaults = self._settings.copy() <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "Router c2600" <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def symbolName(): <NEW_LINE> <INDENT> return "Router c2600" | Dynamips c2600 router.
:param module: parent module for this node
:param server: GNS3 server instance | 625990488a43f66fc4bf3527 |
class OUNoise: <NEW_LINE> <INDENT> def __init__(self, size, seed, mu=0., theta=0.15, sigma=0.2, wiener_random=False): <NEW_LINE> <INDENT> self.mu = mu * np.ones(size) <NEW_LINE> self.theta = theta <NEW_LINE> self.sigma = sigma <NEW_LINE> self.seed = random.seed(seed) <NEW_LINE> self.wiener_random = wiener_random <NEW_LINE> self.size = size <NEW_LINE> self.reset() <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self.state = copy.copy(self.mu) <NEW_LINE> <DEDENT> def sample(self): <NEW_LINE> <INDENT> x = self.state <NEW_LINE> if self.wiener_random: <NEW_LINE> <INDENT> dx = self.theta * (self.mu - x) + self.sigma * np.array( [random.random() for i in range(len(x))]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dx = self.theta * (self.mu - x) + self.sigma * np.random.standard_normal( self.size) <NEW_LINE> <DEDENT> self.state = x + dx <NEW_LINE> return self.state | Ornstein-Uhlenbeck process. | 6259904845492302aabfd864 |
class TypeSpecificPartType8(KaitaiStruct): <NEW_LINE> <INDENT> SEQ_FIELDS = ["working_buffer_fractional_size", "expansion_buffer_size", "decompressor_id", "reserved"] <NEW_LINE> def __init__(self, _io, _parent=None, _root=None): <NEW_LINE> <INDENT> self._io = _io <NEW_LINE> self._parent = _parent <NEW_LINE> self._root = _root if _root else self <NEW_LINE> self._debug = collections.defaultdict(dict) <NEW_LINE> <DEDENT> def _read(self): <NEW_LINE> <INDENT> self._debug['working_buffer_fractional_size']['start'] = self._io.pos() <NEW_LINE> self.working_buffer_fractional_size = self._io.read_u1() <NEW_LINE> self._debug['working_buffer_fractional_size']['end'] = self._io.pos() <NEW_LINE> self._debug['expansion_buffer_size']['start'] = self._io.pos() <NEW_LINE> self.expansion_buffer_size = self._io.read_u1() <NEW_LINE> self._debug['expansion_buffer_size']['end'] = self._io.pos() <NEW_LINE> self._debug['decompressor_id']['start'] = self._io.pos() <NEW_LINE> self.decompressor_id = self._io.read_s2be() <NEW_LINE> self._debug['decompressor_id']['end'] = self._io.pos() <NEW_LINE> self._debug['reserved']['start'] = self._io.pos() <NEW_LINE> self.reserved = self._io.read_u2be() <NEW_LINE> self._debug['reserved']['end'] = self._io.pos() <NEW_LINE> if not self.reserved == 0: <NEW_LINE> <INDENT> raise kaitaistruct.ValidationNotEqualError(0, self.reserved, self._io, u"/types/header/types/type_specific_part_type_8/seq/3") | The type-specific part of a compressed resource header with header type `8`.
| 6259904863b5f9789fe864ff |
class HighBandwidthReader(ReaderModule): <NEW_LINE> <INDENT> def custom_args(self, parser): <NEW_LINE> <INDENT> grp = parser.add_argument_group("module", "module specific arguments") <NEW_LINE> grp.add_argument("--rate", type=float, required=True, help="sample rate in Hz") <NEW_LINE> <DEDENT> async def run(self, parsed_args, output): <NEW_LINE> <INDENT> start_ts = time_now() <NEW_LINE> period = 1 <NEW_LINE> samples_per_period = np.round(parsed_args.rate * period) <NEW_LINE> while True: <NEW_LINE> <INDENT> end_ts = start_ts + period * 1e6 <NEW_LINE> ts = np.linspace(start_ts, end_ts, samples_per_period, endpoint=False) <NEW_LINE> vals = np.linspace(0, 33, samples_per_period) <NEW_LINE> start_ts = end_ts <NEW_LINE> chunk = np.hstack((ts[:, None], vals[:, None])) <NEW_LINE> await output.write(chunk) <NEW_LINE> await asyncio.sleep(period) | Produce a 1Hz ramp sampled at [rate] Hz | 62599048379a373c97d9a3bd |
class TestRemoveDiagonalGatesBeforeMeasureFixedPoint(QiskitTestCase): <NEW_LINE> <INDENT> def test_optimize_rz_z(self): <NEW_LINE> <INDENT> qr = QuantumRegister(1, 'qr') <NEW_LINE> cr = ClassicalRegister(1, 'cr') <NEW_LINE> circuit = QuantumCircuit(qr, cr) <NEW_LINE> circuit.rz(0.1, qr[0]) <NEW_LINE> circuit.z(qr[0]) <NEW_LINE> circuit.measure(qr[0], cr[0]) <NEW_LINE> expected = QuantumCircuit(qr, cr) <NEW_LINE> expected.measure(qr[0], cr[0]) <NEW_LINE> pass_manager = PassManager() <NEW_LINE> pass_manager.append( [RemoveDiagonalGatesBeforeMeasure(), DAGFixedPoint()], do_while=lambda property_set: not property_set['dag_fixed_point']) <NEW_LINE> after = pass_manager.run(circuit) <NEW_LINE> self.assertEqual(expected, after) | Test remove_diagonal_gates_before_measure optimizations in
a transpiler, using fixed point. | 62599048dc8b845886d5494e |
class DetailedLogWriterFirstStage(FirstStageBaseEventClass): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> FirstStageBaseEventClass.__init__(self, *args, **kwargs) <NEW_LINE> self.task_function = self.process_event <NEW_LINE> <DEDENT> def process_event(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> event = self.q.get(timeout=0.05) <NEW_LINE> print("HERE: GOT AN EVENT!: " + event.MessageName) <NEW_LINE> if not event.MessageName.startswith('key down'): <NEW_LINE> <INDENT> print("HERE: NOT USEFUL!") <NEW_LINE> self.logger.debug('not a useful event') <NEW_LINE> return <NEW_LINE> <DEDENT> print("HERE: AFTER GOT EVENT") <NEW_LINE> process_name = self.get_process_name(event) <NEW_LINE> print("HERE: AFTER PROCESS NAME: " + str(process_name)) <NEW_LINE> loggable = self.needs_logging(event, process_name) <NEW_LINE> print("HERE: AFTER NEEDS LOGGING" + str(loggable)) <NEW_LINE> if not loggable: <NEW_LINE> <INDENT> print("HERE: NOT LOGGABLE!") <NEW_LINE> self.logger.debug("not loggable, we are outta here\n") <NEW_LINE> return <NEW_LINE> <DEDENT> print("HERE: LET'S LOG IT KEY: " + event.Key) <NEW_LINE> self.logger.debug("loggable, lets log it. key: %s" % to_unicode(event.Key)) <NEW_LINE> username = self.get_username() <NEW_LINE> self.sst_q.put((process_name, username, event)) <NEW_LINE> <DEDENT> except Empty: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.logger.debug("some exception was caught in " "the logwriter loop...\nhere it is:\n", exc_info=True) <NEW_LINE> pass <NEW_LINE> <DEDENT> <DEDENT> def needs_logging(self, event, process_name): <NEW_LINE> <INDENT> if self.subsettings['General']['Applications Not Logged'] != 'None': <NEW_LINE> <INDENT> for path in self.subsettings['General']['Applications Not Logged'].split(';'): <NEW_LINE> <INDENT> if os.path.exists(path) and os.stat(path) == os.stat(process_name): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> def get_process_name(self, event): <NEW_LINE> <INDENT> if os.name == 'nt': <NEW_LINE> <INDENT> hwnd = event.Window <NEW_LINE> try: <NEW_LINE> <INDENT> threadpid, procpid = win32process.GetWindowThreadProcessId(hwnd) <NEW_LINE> mypyproc = win32api.OpenProcess(win32con.PROCESS_ALL_ACCESS, False, procpid) <NEW_LINE> procname = win32process.GetModuleFileNameEx(mypyproc, 0) <NEW_LINE> return procname <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return "noprocname" <NEW_LINE> <DEDENT> <DEDENT> elif os.name == 'posix': <NEW_LINE> <INDENT> print("HERE: RETURNING PROC NAME: " + str(event.WindowProcName)) <NEW_LINE> return to_unicode(event.WindowProcName) <NEW_LINE> <DEDENT> <DEDENT> def get_username(self): <NEW_LINE> <INDENT> username = None <NEW_LINE> for varname in ['USERNAME','USER','LOGNAME']: <NEW_LINE> <INDENT> username = os.getenv(varname) <NEW_LINE> if username is not None: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> if username is None: <NEW_LINE> <INDENT> username = 'none' <NEW_LINE> <DEDENT> return username <NEW_LINE> <DEDENT> def spawn_second_stage_thread(self): <NEW_LINE> <INDENT> self.sst_q = Queue(0) <NEW_LINE> self.sst = DetailedLogWriterSecondStage(self.dir_lock, self.sst_q, self.loggername) | Standard detailed log writer, first stage.
Grabs keyboard events, finds the process name and username, then
passes the event on to the second stage. | 6259904894891a1f408ba0bf |
class JDSSResourceIsBusyException(JDSSException): <NEW_LINE> <INDENT> message = _("JDSS resource %(res)s is busy.") | Resource has dependents | 6259904871ff763f4b5e8b37 |
class PointOfInterestManageListView(LoginRequiredMixin, OwnerMixin, ListView): <NEW_LINE> <INDENT> model = PointOfInterest <NEW_LINE> template_name = "dashboard/poi_list.html" <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> return self.request.user.profile.fetch_points_of_interests() | The view of a list of the user's points of interest. | 6259904830c21e258be99b99 |
class IElemNode(IElem, IElemNameMixin): <NEW_LINE> <INDENT> def __init__(self, elem, parent=None): <NEW_LINE> <INDENT> super(IElemNode, self).__init__(elem, parent) <NEW_LINE> self.child_nodes = [ c for c in self.child_elements if isinstance(c, IElemNode)] | Introspection node. | 625990486fece00bbacccd4a |
class CIFARNIN(Chain): <NEW_LINE> <INDENT> def __init__(self, channels, first_ksizes, in_channels=3, in_size=(32, 32), classes=10): <NEW_LINE> <INDENT> super(CIFARNIN, self).__init__() <NEW_LINE> self.in_size = in_size <NEW_LINE> self.classes = classes <NEW_LINE> with self.init_scope(): <NEW_LINE> <INDENT> self.features = SimpleSequential() <NEW_LINE> with self.features.init_scope(): <NEW_LINE> <INDENT> for i, channels_per_stage in enumerate(channels): <NEW_LINE> <INDENT> stage = SimpleSequential() <NEW_LINE> with stage.init_scope(): <NEW_LINE> <INDENT> for j, out_channels in enumerate(channels_per_stage): <NEW_LINE> <INDENT> if (j == 0) and (i != 0): <NEW_LINE> <INDENT> if i == 1: <NEW_LINE> <INDENT> setattr(stage, "pool{}".format(i + 1), partial( F.max_pooling_2d, ksize=3, stride=2, pad=1, cover_all=False)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> setattr(stage, "pool{}".format(i + 1), partial( F.average_pooling_2d, ksize=3, stride=2, pad=1)) <NEW_LINE> <DEDENT> setattr(stage, "dropout{}".format(i + 1), partial( F.dropout, ratio=0.5)) <NEW_LINE> <DEDENT> kernel_size = first_ksizes[i] if j == 0 else 1 <NEW_LINE> padding = (kernel_size - 1) // 2 <NEW_LINE> setattr(stage, "unit{}".format(j + 1), NINConv( in_channels=in_channels, out_channels=out_channels, ksize=kernel_size, pad=padding)) <NEW_LINE> in_channels = out_channels <NEW_LINE> <DEDENT> <DEDENT> setattr(self.features, "stage{}".format(i + 1), stage) <NEW_LINE> <DEDENT> <DEDENT> self.output = SimpleSequential() <NEW_LINE> with self.output.init_scope(): <NEW_LINE> <INDENT> setattr(self.output, "final_conv", NINConv( in_channels=in_channels, out_channels=classes, ksize=1)) <NEW_LINE> setattr(self.output, "final_pool", partial( F.average_pooling_2d, ksize=8, stride=1)) <NEW_LINE> setattr(self.output, "final_flatten", partial( F.reshape, shape=(-1, classes))) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __call__(self, x): <NEW_LINE> <INDENT> x = self.features(x) <NEW_LINE> x = self.output(x) <NEW_LINE> return x | NIN model for CIFAR from 'Network In Network,' https://arxiv.org/abs/1312.4400.
Parameters:
----------
channels : list of list of int
Number of output channels for each unit.
first_ksizes : list of int
Convolution window sizes for the first units in each stage.
in_channels : int, default 3
Number of input channels.
in_size : tuple of two ints, default (32, 32)
Spatial size of the expected input image.
classes : int, default 10
Number of classification classes. | 62599048596a897236128f79 |
class DataSet(DataObject): <NEW_LINE> <INDENT> def GetPointData(self): <NEW_LINE> <INDENT> return self.GetAttributes(ArrayAssociation.POINT) <NEW_LINE> <DEDENT> def GetCellData(self): <NEW_LINE> <INDENT> return self.GetAttributes(ArrayAssociation.CELL) <NEW_LINE> <DEDENT> PointData = property(GetPointData, None, None, "This property returns the point data of the dataset.") <NEW_LINE> CellData = property(GetCellData, None, None, "This property returns the cell data of a dataset.") | This is a python friendly wrapper of a vtkDataSet that defines
a few useful properties. | 6259904826238365f5fadef0 |
class TargetPagesMissing(WikiTransferException): <NEW_LINE> <INDENT> pass | Thrown if no page range has been specified to operate on. | 62599048e64d504609df9d9a |
class TimedeltaProperties(Properties): <NEW_LINE> <INDENT> def to_pytimedelta(self): <NEW_LINE> <INDENT> return self.values.to_pytimedelta() <NEW_LINE> <DEDENT> @property <NEW_LINE> def components(self): <NEW_LINE> <INDENT> return self.values.components.set_index(self.index) | Accessor object for datetimelike properties of the Series values.
Examples
--------
>>> s.dt.hours
>>> s.dt.seconds
Returns a Series indexed like the original Series.
Raises TypeError if the Series does not contain datetimelike values. | 6259904815baa72349463325 |
class ParseGames(object): <NEW_LINE> <INDENT> def __init__(self, file_name): <NEW_LINE> <INDENT> self.file_name = file_name <NEW_LINE> self.raw_games = self._split_games() <NEW_LINE> self.games = [ParseGame(game_pgn_text) for game_pgn_text in self.raw_games] <NEW_LINE> <DEDENT> def _split_games(self): <NEW_LINE> <INDENT> with open(self.file_name) as f: <NEW_LINE> <INDENT> pgn_content = f.readlines() <NEW_LINE> <DEDENT> is_tag, is_move = False, False <NEW_LINE> games, game = [], '' <NEW_LINE> for pgn_line in pgn_content: <NEW_LINE> <INDENT> if pgn_line[0] == '[' and is_tag and is_move: <NEW_LINE> <INDENT> games.append(game) <NEW_LINE> is_tag, is_move = False, False <NEW_LINE> game = '' <NEW_LINE> <DEDENT> if pgn_line[0] == '[': <NEW_LINE> <INDENT> is_tag = True <NEW_LINE> <DEDENT> elif pgn_line[0] == '1': <NEW_LINE> <INDENT> is_move = True <NEW_LINE> <DEDENT> game = game + pgn_line <NEW_LINE> <DEDENT> games.append(game) <NEW_LINE> return games | This class parses PGN file that contains many games. | 6259904845492302aabfd866 |
class OptionsFlowHandler(config_entries.OptionsFlow): <NEW_LINE> <INDENT> def __init__(self, config_entry: config_entries.ConfigEntry) -> None: <NEW_LINE> <INDENT> self.config_entry = config_entry <NEW_LINE> <DEDENT> async def async_step_init(self, user_input=None): <NEW_LINE> <INDENT> if user_input is not None: <NEW_LINE> <INDENT> return self.async_create_entry(title="", data=user_input) <NEW_LINE> <DEDENT> data_schema = vol.Schema( { vol.Required( CONF_UPDATE_ENABLED, default=self.config_entry.options.get(CONF_UPDATE_ENABLED, False), ): cv.boolean, } ) <NEW_LINE> return self.async_show_form(step_id="init", data_schema=data_schema) | Handle a option flow for Subaru. | 6259904891af0d3eaad3b1b9 |
class ReplayExceptionClient(object): <NEW_LINE> <INDENT> def __init__(self, client): <NEW_LINE> <INDENT> self.client = client <NEW_LINE> self.client.extensions['exceptions'] = self <NEW_LINE> self.client.recreate_error_locally = self.recreate_error_locally <NEW_LINE> self.client._recreate_error_locally = self._recreate_error_locally <NEW_LINE> self.client._get_futures_error = self._get_futures_error <NEW_LINE> self.client.get_futures_error = self.get_futures_error <NEW_LINE> <DEDENT> @property <NEW_LINE> def scheduler(self): <NEW_LINE> <INDENT> return self.client.scheduler <NEW_LINE> <DEDENT> @gen.coroutine <NEW_LINE> def _get_futures_error(self, future): <NEW_LINE> <INDENT> futures = [f for f in futures_of(future) if f.status == 'error'] <NEW_LINE> if not futures: <NEW_LINE> <INDENT> raise ValueError("No errored futures passed") <NEW_LINE> <DEDENT> out = yield self.scheduler.cause_of_failure( keys=[f.key for f in futures]) <NEW_LINE> deps, task = out['deps'], out['task'] <NEW_LINE> if isinstance(task, dict): <NEW_LINE> <INDENT> function, args, kwargs = _deserialize(**task) <NEW_LINE> raise gen.Return((function, args, kwargs, deps)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> function, args, kwargs = _deserialize(task=task) <NEW_LINE> raise gen.Return((function, args, kwargs, deps)) <NEW_LINE> <DEDENT> <DEDENT> def get_futures_error(self, future): <NEW_LINE> <INDENT> return self.client.sync(self._get_futures_error, future) <NEW_LINE> <DEDENT> @gen.coroutine <NEW_LINE> def _recreate_error_locally(self, future): <NEW_LINE> <INDENT> yield wait(future) <NEW_LINE> out = yield self._get_futures_error(future) <NEW_LINE> function, args, kwargs, deps = out <NEW_LINE> futures = self.client._graph_to_futures({}, deps) <NEW_LINE> data = yield self.client._gather(futures) <NEW_LINE> args = pack_data(args, data) <NEW_LINE> kwargs = pack_data(kwargs, data) <NEW_LINE> raise gen.Return((function, args, kwargs)) <NEW_LINE> <DEDENT> def recreate_error_locally(self, future): <NEW_LINE> <INDENT> func, args, kwargs = sync(self.client.loop, self._recreate_error_locally, future) <NEW_LINE> func(*args, **kwargs) | A plugin for the client allowing replay of remote exceptions locally
Adds the following methods (and their async variants)to the given client:
- ``recreate_error_locally``: main user method
- ``get_futures_error``: gets the task, its details and dependencies,
responsible for failure of the given future. | 62599048ec188e330fdf9c31 |
class Land(): <NEW_LINE> <INDENT> def __init__(self, name, kontinent, pos1, pos2, pos3, pos4, pos5): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.kontinent = kontinent <NEW_LINE> self.pos1 = pos1 <NEW_LINE> self.pos2 = pos2 <NEW_LINE> self.pos3 = pos3 <NEW_LINE> self.pos4 = pos4 <NEW_LINE> self.pos5 = pos5 <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name | Constructor: name, kontinent | 62599048379a373c97d9a3bf |
class NumberWords: <NEW_LINE> <INDENT> _WORD_MAP = ( 'one', 'two', 'three', 'four', 'five', ) <NEW_LINE> def __init__(self, start, stop): <NEW_LINE> <INDENT> self.start = start <NEW_LINE> self.stop = stop <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __next__(self): <NEW_LINE> <INDENT> if self.start > self.stop or self.start > len(self._WORD_MAP): <NEW_LINE> <INDENT> raise StopIteration <NEW_LINE> <DEDENT> current = self.start <NEW_LINE> self.start += 1 <NEW_LINE> return self._WORD_MAP[current - 1] | Counts by word numbers, up to a maximum of five | 62599048711fe17d825e1668 |
class NVVM(object): <NEW_LINE> <INDENT> _PROTOTYPES = { 'nvvmVersion': (nvvm_result, POINTER(c_int), POINTER(c_int)), 'nvvmCreateProgram': (nvvm_result, POINTER(nvvm_program)), 'nvvmDestroyProgram': (nvvm_result, POINTER(nvvm_program)), 'nvvmAddModuleToProgram': ( nvvm_result, nvvm_program, c_char_p, c_size_t, c_char_p), 'nvvmCompileProgram': ( nvvm_result, nvvm_program, c_int, POINTER(c_char_p)), 'nvvmGetCompiledResultSize': ( nvvm_result, nvvm_program, POINTER(c_size_t)), 'nvvmGetCompiledResult': (nvvm_result, nvvm_program, c_char_p), 'nvvmGetProgramLogSize': (nvvm_result, nvvm_program, POINTER(c_size_t)), 'nvvmGetProgramLog': (nvvm_result, nvvm_program, c_char_p), } <NEW_LINE> __INSTANCE = None <NEW_LINE> def __new__(cls): <NEW_LINE> <INDENT> with _nvvm_lock: <NEW_LINE> <INDENT> if cls.__INSTANCE is None: <NEW_LINE> <INDENT> cls.__INSTANCE = inst = object.__new__(cls) <NEW_LINE> try: <NEW_LINE> <INDENT> inst.driver = open_cudalib('nvvm') <NEW_LINE> <DEDENT> except OSError as e: <NEW_LINE> <INDENT> cls.__INSTANCE = None <NEW_LINE> errmsg = ("libNVVM cannot be found. Do `conda install " "cudatoolkit`:\n%s") <NEW_LINE> raise NvvmSupportError(errmsg % e) <NEW_LINE> <DEDENT> for name, proto in inst._PROTOTYPES.items(): <NEW_LINE> <INDENT> func = getattr(inst.driver, name) <NEW_LINE> func.restype = proto[0] <NEW_LINE> func.argtypes = proto[1:] <NEW_LINE> setattr(inst, name, func) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return cls.__INSTANCE <NEW_LINE> <DEDENT> def get_version(self): <NEW_LINE> <INDENT> major = c_int() <NEW_LINE> minor = c_int() <NEW_LINE> err = self.nvvmVersion(byref(major), byref(minor)) <NEW_LINE> self.check_error(err, 'Failed to get version.') <NEW_LINE> return major.value, minor.value <NEW_LINE> <DEDENT> def check_error(self, error, msg, exit=False): <NEW_LINE> <INDENT> if error: <NEW_LINE> <INDENT> exc = NvvmError(msg, RESULT_CODE_NAMES[error]) <NEW_LINE> if exit: <NEW_LINE> <INDENT> print(exc) <NEW_LINE> sys.exit(1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise exc | Process-wide singleton.
| 62599048dc8b845886d54950 |
class TestTankisNitrox32_2(TestTank): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super().setUp() <NEW_LINE> self.mytank = Tank(f_o2=0.32, max_ppo2=1.4) <NEW_LINE> <DEDENT> def test_name(self): <NEW_LINE> <INDENT> assert str(self.mytank) == 'Nitrox 32' <NEW_LINE> <DEDENT> def test_mod(self): <NEW_LINE> <INDENT> assert self.mytank.mod == 33 <NEW_LINE> <DEDENT> def test_mod_at_end(self): <NEW_LINE> <INDENT> self.assertAlmostEqual(self.mytank.get_mod_for_given_end(30), 31.0195918624, 5, 'wrong mod at end:%s' % self.mytank.get_mod_for_given_end(30)) <NEW_LINE> <DEDENT> def test_end_at_depth(self): <NEW_LINE> <INDENT> self.assertAlmostEqual(self.mytank.get_end_for_given_depth(40), 38.7573409377, 5, 'wrong end at depth:%s' % self.mytank.get_end_for_given_depth(40)) | Test Nitrox32 Tank Z. | 62599048b57a9660fecd2e11 |
class ZophBackupRestore(BackupRestore): <NEW_LINE> <INDENT> def backup_pre(self, packet): <NEW_LINE> <INDENT> actions.superuser_run('zoph', ['dump-database']) <NEW_LINE> <DEDENT> def restore_post(self, packet): <NEW_LINE> <INDENT> actions.superuser_run('zoph', ['restore-database']) | Component to backup/restore Zoph database | 6259904810dbd63aa1c71f70 |
class ChannelSparseConvLayer(niftynet.layer.convolution.ConvLayer): <NEW_LINE> <INDENT> def __init__(self,*args,**kwargs): <NEW_LINE> <INDENT> super(ChannelSparseConvLayer,self).__init__(*args,**kwargs) <NEW_LINE> <DEDENT> def layer_op(self,input_tensor,input_mask,output_mask): <NEW_LINE> <INDENT> sparse_input_shape = input_tensor.shape.as_list() <NEW_LINE> if input_mask is None: <NEW_LINE> <INDENT> _input_mask=tf.ones([sparse_input_shape[-1]])>0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> _input_mask=input_mask <NEW_LINE> <DEDENT> if output_mask is None: <NEW_LINE> <INDENT> _output_mask=tf.ones([self.n_output_chns])>0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> _output_mask=output_mask <NEW_LINE> <DEDENT> n_full_input_chns = _input_mask.shape.as_list()[0] <NEW_LINE> spatial_rank = layer_util.infer_spatial_rank(input_tensor) <NEW_LINE> w_full_size = layer_util.expand_spatial_params( self.kernel_size, spatial_rank) <NEW_LINE> w_full_size = w_full_size + (n_full_input_chns, self.n_output_chns) <NEW_LINE> full_stride = layer_util.expand_spatial_params( self.stride, spatial_rank) <NEW_LINE> full_dilation = layer_util.expand_spatial_params( self.dilation, spatial_rank) <NEW_LINE> conv_kernel = tf.get_variable( 'w', shape=w_full_size, initializer=self.initializers['w'], regularizer=self.regularizers['w']) <NEW_LINE> sparse_kernel = tf.transpose(tf.boolean_mask( tf.transpose(tf.boolean_mask( tf.transpose(conv_kernel,[4,3,2,1,0]), _output_mask),[1,0,2,3,4]),_input_mask),[4,3,2,0,1]) <NEW_LINE> output_tensor = tf.nn.convolution(input=input_tensor, filter=sparse_kernel, strides=full_stride, dilation_rate=full_dilation, padding=self.padding, name='conv') <NEW_LINE> if output_mask is None: <NEW_LINE> <INDENT> old_shape=output_tensor.shape.as_list() <NEW_LINE> old_shape[-1]=self.n_output_chns <NEW_LINE> output_tensor.set_shape(old_shape) <NEW_LINE> <DEDENT> if not self.with_bias: <NEW_LINE> <INDENT> return output_tensor <NEW_LINE> <DEDENT> bias_term = tf.get_variable( 'b', shape=self.n_output_chns, initializer=self.initializers['b'], regularizer=self.regularizers['b']) <NEW_LINE> sparse_bias = tf.boolean_mask(bias_term,output_mask) <NEW_LINE> output_tensor = tf.nn.bias_add(output_tensor, sparse_bias, name='add_bias') <NEW_LINE> return output_tensor | Channel sparse convolutions perform convolulations over
a subset of image channels and generate a subset of output
channels. This enables spatial dropout without wasted computations | 6259904830c21e258be99b9b |
class Channel(NamedTuple): <NEW_LINE> <INDENT> name: str <NEW_LINE> delivery_system: str <NEW_LINE> frequency: int <NEW_LINE> symbol_rate: int <NEW_LINE> inner_fec: str <NEW_LINE> modulation: str <NEW_LINE> inversion: str | A dvb channel config object | 625990481f5feb6acb163f8a |
class MinimaxAgent(MultiAgentSearchAgent): <NEW_LINE> <INDENT> def getAction(self, gameState): <NEW_LINE> <INDENT> def miniMax(state, index, depth): <NEW_LINE> <INDENT> if depth == 1 or len(state.getLegalActions(index)) == 0: <NEW_LINE> <INDENT> return scoreEvaluationFunction(state), None <NEW_LINE> <DEDENT> actions = state.getLegalActions(index) <NEW_LINE> choices = [] <NEW_LINE> if index == 0: <NEW_LINE> <INDENT> maximum = -5000 <NEW_LINE> for action in actions: <NEW_LINE> <INDENT> value = miniMax(state.generatePacmanSuccessor(action), state.getNumAgents() - 1, depth - 1)[0] <NEW_LINE> if value > maximum: <NEW_LINE> <INDENT> choices = [action] <NEW_LINE> maximum = value <NEW_LINE> <DEDENT> elif value == maximum: <NEW_LINE> <INDENT> choices.append(action) <NEW_LINE> <DEDENT> <DEDENT> return maximum, random.choice(choices) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> minimum = 5000 <NEW_LINE> for action in actions: <NEW_LINE> <INDENT> value = miniMax(state.generateSuccessor(index, action), index - 1, depth - 1)[0] <NEW_LINE> if value < minimum: <NEW_LINE> <INDENT> choices = [action] <NEW_LINE> minimum = value <NEW_LINE> <DEDENT> elif value == minimum: <NEW_LINE> <INDENT> choices.append(action) <NEW_LINE> <DEDENT> <DEDENT> return minimum, random.choice(choices) <NEW_LINE> <DEDENT> <DEDENT> return miniMax(gameState, 0, self.depth * gameState.getNumAgents() + 1)[1] | Your minimax agent (problem 1) | 62599048d10714528d69f058 |
class CompositeCollection(object, IList, ICollection, IEnumerable, INotifyCollectionChanged, ICollectionViewFactory, IWeakEventListener): <NEW_LINE> <INDENT> def Add(self, newItem): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def add_CollectionChanged(self, *args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def Clear(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def Contains(self, containItem): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def CopyTo(self, array, index): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def IndexOf(self, indexItem): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def Insert(self, insertIndex, insertItem): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def ReceiveWeakEvent(self, *args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def Remove(self, removeItem): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def RemoveAt(self, removeIndex): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def remove_CollectionChanged(self, *args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __add__(self, *args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __contains__(self, *args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __getitem__(self, *args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __init__(self, *args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __iter__(self, *args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __len__(self, *args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def __new__(self, capacity=None): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __repr__(self, *args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __setitem__(self, *args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> Count = property(lambda self: object(), lambda self, v: None, lambda self: None) | Enables multiple collections and items to be displayed as a single list.
CompositeCollection()
CompositeCollection(capacity: int) | 62599048009cb60464d028ca |
class Embed: <NEW_LINE> <INDENT> __slots__ = ( "color", "title", "url", "author", "description", "fields", "image", "thumbnail", "footer", "timestamp", ) <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.color = kwargs.get("color") <NEW_LINE> self.title = kwargs.get("title") <NEW_LINE> self.url = kwargs.get("url") <NEW_LINE> self.description = kwargs.get("description") <NEW_LINE> self.timestamp = kwargs.get("timestamp") <NEW_LINE> if self.timestamp == "now": <NEW_LINE> <INDENT> self.timestamp = str(datetime.datetime.utcnow()) <NEW_LINE> <DEDENT> self.author = None <NEW_LINE> self.thumbnail = None <NEW_LINE> self.image = None <NEW_LINE> self.footer = None <NEW_LINE> self.fields = [] <NEW_LINE> image_url = kwargs.get("image_url") <NEW_LINE> if image_url is not None: <NEW_LINE> <INDENT> self.set_image(image_url) <NEW_LINE> <DEDENT> thumbnail_url = kwargs.get("thumbnail_url") <NEW_LINE> if thumbnail_url is not None: <NEW_LINE> <INDENT> self.set_thumbnail(thumbnail_url) <NEW_LINE> <DEDENT> <DEDENT> def del_field(self, index: int) -> None: <NEW_LINE> <INDENT> self.fields.pop(index) <NEW_LINE> <DEDENT> def set_title(self, title: str, url: str = None) -> None: <NEW_LINE> <INDENT> self.title = title <NEW_LINE> self.url = url <NEW_LINE> <DEDENT> def set_timestamp( self, time: Union[str, datetime.datetime] = None, now: bool = False ) -> None: <NEW_LINE> <INDENT> if now: <NEW_LINE> <INDENT> self.timestamp = str(datetime.datetime.utcnow()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.timestamp = str(time) <NEW_LINE> <DEDENT> <DEDENT> def add_field(self, name: str, value: str, inline: bool = True) -> None: <NEW_LINE> <INDENT> field = {"name": name, "value": value, "inline": inline} <NEW_LINE> self.fields.append(field) <NEW_LINE> <DEDENT> def set_author(self, name: str, icon_url: str = None, url: str = None) -> None: <NEW_LINE> <INDENT> self.author = {"name": name, "icon_url": icon_url, "url": url} <NEW_LINE> <DEDENT> def set_thumbnail(self, url: str) -> None: <NEW_LINE> <INDENT> self.thumbnail = {"url": url} <NEW_LINE> <DEDENT> def set_image(self, url: str) -> None: <NEW_LINE> <INDENT> self.image = {"url": url} <NEW_LINE> <DEDENT> def set_footer(self, text: str, icon_url: str = None) -> None: <NEW_LINE> <INDENT> self.footer = {"text": text, "icon_url": icon_url} <NEW_LINE> <DEDENT> def to_dict(self) -> dict: <NEW_LINE> <INDENT> return { key: getattr(self, key) for key in self.__slots__ if getattr(self, key) is not None } | Class that represents a discord embed.
Parameters
-----------
\*\*title: str, optional
Defaults to :class:`None`.
The title of the embed.
\*\*description: str, optional
Defaults to :class:`None`.
The description of the embed.
\*\*url: str, optional
URL of the embed. It requires :attr:`title` to be set.
\*\*timestamp: str, optional
``ISO 8601`` timestamp of the embed. If set to a "now",
the current time is set as the timestamp.
\*\*color: int (or hex), optional
Color of the embed.
\*\*image_url: str, optional
URL of the image.
\*\*thumbnail_url: str, optional
URL of the thumbnail. | 6259904807d97122c4218038 |
class GlslBlockInOutStruct(GlslBlockInOut): <NEW_LINE> <INDENT> def __init__(self, layout, inout, type_name, members, name, size=0): <NEW_LINE> <INDENT> GlslBlockInOut.__init__(self, layout, inout) <NEW_LINE> self.__type_name = type_name <NEW_LINE> self.__members = members <NEW_LINE> self.__name = name <NEW_LINE> self.__size = size <NEW_LINE> self.__member_accesses = [] <NEW_LINE> name.setType(type_name) <NEW_LINE> self.addNamesDeclared(name) <NEW_LINE> self.addNamesUsed(name) <NEW_LINE> <DEDENT> def format(self, force): <NEW_LINE> <INDENT> ret = self.formatBase(force) <NEW_LINE> lst = "".join(map(lambda x: x.format(force), self.__members)) <NEW_LINE> ret += (" %s{%s}%s" % (self.__type_name.format(force), lst, self.__name.format(force))) <NEW_LINE> if self.__size: <NEW_LINE> <INDENT> ret += "[%s]" % (self.__size.format(force)) <NEW_LINE> <DEDENT> return ret + ";" <NEW_LINE> <DEDENT> def getMembers(self): <NEW_LINE> <INDENT> return self.__members <NEW_LINE> <DEDENT> def getMemberAccesses(self): <NEW_LINE> <INDENT> return self.__member_accesses <NEW_LINE> <DEDENT> def getName(self): <NEW_LINE> <INDENT> return self.__name <NEW_LINE> <DEDENT> def getTypeName(self): <NEW_LINE> <INDENT> return self.__type_name <NEW_LINE> <DEDENT> def isMergableWith(self, op): <NEW_LINE> <INDENT> if not is_glsl_block_inout_struct(op): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if self.getTypeName() != op.getTypeName(): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> members1 = sorted(op.getMembers()) <NEW_LINE> members2 = sorted(self.__members) <NEW_LINE> if len(members1) != len(members2): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> for (ii, jj) in zip(members1, members2): <NEW_LINE> <INDENT> if ii.getName() != jj.getName(): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> def setMemberAccesses(self, lst): <NEW_LINE> <INDENT> self.__member_accesses = lst <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "InOutStruct('%s')" % (self.__name.getName()) | Input (attribute / varying) struct declaration block. | 62599048507cdc57c63a6133 |
class RankEnvironment(Environment): <NEW_LINE> <INDENT> __metaclass__ = ABCMeta <NEW_LINE> def __init__(self, hyperparams): <NEW_LINE> <INDENT> super(RankEnvironment, self).__init__(hyperparams) <NEW_LINE> return <NEW_LINE> <DEDENT> def transit(self, state, action): <NEW_LINE> <INDENT> t = state[0] <NEW_LINE> Xt = state[1] <NEW_LINE> assert 0 <= action < len(Xt) <NEW_LINE> Xt_1 = copy.deepcopy(Xt) <NEW_LINE> del Xt_1[action] <NEW_LINE> state_new = (t+1, Xt_1) <NEW_LINE> return state_new <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def reward(self, state, action): <NEW_LINE> <INDENT> raise NotImplementedError("Must be implemented in subclass.") | Rank Environment superclass. | 6259904873bcbd0ca4bcb623 |
class APIAssert(BaseModel): <NEW_LINE> <INDENT> assert_choice = ( ("json","json"), ("status_code","status_code"), ("reg","reg"), ("contains","contains"), ) <NEW_LINE> api = models.ForeignKey(API, on_delete=models.CASCADE, verbose_name='接口', related_name="api_assert") <NEW_LINE> case = models.ForeignKey(TestCase, on_delete=models.CASCADE, verbose_name='用例', related_name="case_assert") <NEW_LINE> suite = models.ForeignKey(TestSuite, on_delete=models.CASCADE, verbose_name='用例集', related_name="suite_assert") <NEW_LINE> type = models.CharField(max_length=50, verbose_name='断言方式', choices=assert_choice) <NEW_LINE> pattern = models.CharField(max_length=50,blank=True, null=True, verbose_name='正则或者jsonpath表达式') <NEW_LINE> expect = models.CharField(max_length=50, verbose_name='预期结果') <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.expect <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.expect <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> db_table = "gy_tms_assert" | 断言 | 6259904821a7993f00c672ff |
class FCBNQuadraticStateQFunction(chainer.Chain, StateQFunction): <NEW_LINE> <INDENT> def __init__(self, n_input_channels, n_dim_action, n_hidden_channels, n_hidden_layers, action_space, scale_mu=True, normalize_input=True): <NEW_LINE> <INDENT> self.n_input_channels = n_input_channels <NEW_LINE> self.n_hidden_layers = n_hidden_layers <NEW_LINE> self.n_hidden_channels = n_hidden_channels <NEW_LINE> assert action_space is not None <NEW_LINE> self.scale_mu = scale_mu <NEW_LINE> self.action_space = action_space <NEW_LINE> layers = {} <NEW_LINE> assert n_hidden_layers >= 1 <NEW_LINE> layers['hidden_layers'] = MLPBN( in_size=n_input_channels, out_size=n_hidden_channels, hidden_sizes=[n_hidden_channels] * (n_hidden_layers - 1), normalize_input=normalize_input) <NEW_LINE> layers['v'] = L.Linear(n_hidden_channels, 1) <NEW_LINE> layers['mu'] = L.Linear(n_hidden_channels, n_dim_action) <NEW_LINE> layers['mat_diag'] = L.Linear(n_hidden_channels, n_dim_action) <NEW_LINE> non_diag_size = n_dim_action * (n_dim_action - 1) // 2 <NEW_LINE> if non_diag_size > 0: <NEW_LINE> <INDENT> layers['mat_non_diag'] = L.Linear(n_hidden_channels, non_diag_size) <NEW_LINE> <DEDENT> super().__init__(**layers) <NEW_LINE> <DEDENT> def __call__(self, state): <NEW_LINE> <INDENT> h = self.hidden_layers(state) <NEW_LINE> v = self.v(h) <NEW_LINE> mu = self.mu(h) <NEW_LINE> if self.scale_mu: <NEW_LINE> <INDENT> mu = scale_by_tanh(mu, high=self.action_space.high, low=self.action_space.low) <NEW_LINE> <DEDENT> mat_diag = F.exp(self.mat_diag(h)) <NEW_LINE> if hasattr(self, 'mat_non_diag'): <NEW_LINE> <INDENT> mat_non_diag = self.mat_non_diag(h) <NEW_LINE> tril = lower_triangular_matrix(mat_diag, mat_non_diag) <NEW_LINE> mat = F.batch_matmul(tril, tril, transb=True) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> mat = F.expand_dims(mat_diag ** 2, axis=2) <NEW_LINE> <DEDENT> return QuadraticActionValue( mu, mat, v, min_action=self.action_space.low, max_action=self.action_space.high) | Fully-connected state-input continuous Q-function.
Args:
n_input_channels: number of input channels
n_dim_action: number of dimensions of action space
n_hidden_channels: number of hidden channels
n_hidden_layers: number of hidden layers
action_space: action_space
scale_mu (bool): scale mu by applying tanh if True | 62599048a8ecb033258725a8 |
class PublishCommand(Command): <NEW_LINE> <INDENT> description = 'Build and publish the package.' <NEW_LINE> user_options = [] <NEW_LINE> @staticmethod <NEW_LINE> def status(s): <NEW_LINE> <INDENT> print('\033[1m{0}\033[0m'.format(s)) <NEW_LINE> <DEDENT> def initialize_options(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def finalize_options(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.status('Removing previous builds…') <NEW_LINE> rmtree(os.path.join(here, 'dist')) <NEW_LINE> <DEDENT> except (OSError, IOError): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> self.status('Building Source and Wheel (universal) distribution…') <NEW_LINE> os.system('{0} setup.py sdist bdist_wheel --universal'.format(sys.executable)) <NEW_LINE> self.status('Uploading the package to PyPi via Twine…') <NEW_LINE> os.system('twine upload dist/*') <NEW_LINE> self.status('Pushing git tags…') <NEW_LINE> os.system('git tag v{0}'.format(about['__version__'])) <NEW_LINE> os.system('git push --tags') <NEW_LINE> sys.exit() | Support setup.py publish. | 625990488a43f66fc4bf352b |
class TestPaymentMethodsResponse(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testPaymentMethodsResponse(self): <NEW_LINE> <INDENT> model = kinow_client.models.payment_methods_response.PaymentMethodsResponse() | PaymentMethodsResponse unit test stubs | 6259904863b5f9789fe86503 |
class TopicCreateHandler(BaseHandler): <NEW_LINE> <INDENT> def post(self, category_id): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> topic = self.get_argument("create") <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.render("error.html",message="题目不能为空") <NEW_LINE> <DEDENT> topicdao = TopicDao() <NEW_LINE> try: <NEW_LINE> <INDENT> sign = topicdao.add_topic(topic, category_id) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> sign = False <NEW_LINE> <DEDENT> return self.render("topic_middle.html", data = {"category_id":category_id, "data": sign}) | arguments - all GET or POST parameters, as a dict: self.request.arguments.get(name, [])
files - all files uploaded via multipart/form-data POST requests
path - the request path (everything before the ?)
headers - the request headers | 62599048711fe17d825e1669 |
class HTTPServer(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.port = 19234 <NEW_LINE> tcp_server = socket.socket() <NEW_LINE> tcp_server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) <NEW_LINE> tcp_server.bind(("", self.port)) <NEW_LINE> tcp_server.listen(128) <NEW_LINE> print("服务器已开启。。") <NEW_LINE> self.tcp_server = tcp_server <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> client_server, client_addr = self.tcp_server.accept() <NEW_LINE> g1 = gevent.spawn(self.client_handler, client_server) <NEW_LINE> if KeyboardInterrupt: <NEW_LINE> <INDENT> g1.join() <NEW_LINE> self.tcp_server.close() <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def client_handler(self, client_server): <NEW_LINE> <INDENT> request_data = client_server.recv(1024) <NEW_LINE> if not request_data: <NEW_LINE> <INDENT> print("请求已经断开") <NEW_LINE> client_server.close() <NEW_LINE> return <NEW_LINE> <DEDENT> request_head = request_data.decode().split("\r\n")[0] <NEW_LINE> result = re.match(r'\w+\s+(\S+)\s+HTTP/1\.(1|0)', request_head) <NEW_LINE> if result is None: <NEW_LINE> <INDENT> print("不符合HTTP协议") <NEW_LINE> client_server.close() <NEW_LINE> return <NEW_LINE> <DEDENT> resource = result.group(1) <NEW_LINE> if resource == "/": <NEW_LINE> <INDENT> resource = "/index.html" <NEW_LINE> <DEDENT> if resource.endswith(".html"): <NEW_LINE> <INDENT> self.dynamic_resource(client_server, resource) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.static_resource(resource, client_server) <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def dynamic_resource(client_server, resource): <NEW_LINE> <INDENT> status, headers, response_body = _76_Frame.dynamic_resource({"PATH_INFO": resource}) <NEW_LINE> response_line = "HTTP/1.1 {}\r\n".format(status) <NEW_LINE> response_headers = "Server: PWS/1.1\r\n" <NEW_LINE> for header in headers: <NEW_LINE> <INDENT> response_headers += "{}: {}\r\n".format(*header) <NEW_LINE> <DEDENT> response_data = response_line + response_headers + "\r\n" + response_body <NEW_LINE> client_server.send(response_data.encode()) <NEW_LINE> client_server.close() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def static_resource(resource, client_server): <NEW_LINE> <INDENT> response_head = "Server: PWS/1.1\r\n" <NEW_LINE> response_line = "" <NEW_LINE> response_body = "" <NEW_LINE> try: <NEW_LINE> <INDENT> file = open("./static" + resource, "rb") <NEW_LINE> response_line = "HTTP/1.1 200 OK\r\n" <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> response_line = "HTTP/1.1 404 Page Not Found\r\n" <NEW_LINE> response_body = "<h>哎哟,页面出错了{}。。。。</h>".format(str(e)).encode() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> response_body = file.read() <NEW_LINE> file.close() <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> response_data = (response_line + response_head + "\r\n").encode() + response_body <NEW_LINE> <DEDENT> client_server.send(response_data) <NEW_LINE> client_server.close() | HTTP 服务器类 | 62599048d6c5a102081e34b2 |
class MomentumIntegral(BaseTwoIndexSymmetric): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def construct_array_contraction(contractions_one, contractions_two): <NEW_LINE> <INDENT> if not isinstance(contractions_one, GeneralizedContractionShell): <NEW_LINE> <INDENT> raise TypeError("`contractions_one` must be a `GeneralizedContractionShell` instance.") <NEW_LINE> <DEDENT> if not isinstance(contractions_two, GeneralizedContractionShell): <NEW_LINE> <INDENT> raise TypeError("`contractions_two` must be a `GeneralizedContractionShell` instance.") <NEW_LINE> <DEDENT> output = _compute_differential_operator_integrals( np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]]), contractions_one.coord, contractions_one.angmom_components_cart, contractions_one.exps, contractions_one.coeffs, contractions_one.norm_prim_cart, contractions_two.coord, contractions_two.angmom_components_cart, contractions_two.exps, contractions_two.coeffs, contractions_two.norm_prim_cart, ) <NEW_LINE> return -1j * np.transpose(output, (1, 2, 3, 4, 0)) | Class for obtaining the momentum integral.
Attributes
----------
_axes_contractions : tuple of tuple of GeneralizedContractionShell
Sets of contractions associated with each axis of the array.
contractions : tuple of GeneralizedContractionShell
Contractions that are associated with the first and second indices of the array.
Property of `MomentumIntegral`.
Methods
-------
__init__(self, contractions)
Initialize.
construct_array_contraction(contractions_one, contractions_two) :
np.ndarray(M_1, L_cart_1, M_2, L_cart_2, 3)
Return the integral over the momentum operator associated with a
`GeneralizedContractionShell` instance.
`M_1` is the number of segmented contractions with the same exponents (and angular
momentum) associated with the first index.
`L_cart_1` is the number of Cartesian contractions for the given angular momentum
associated with the first index.
`M_2` is the number of segmented contractions with the same exponents (and angular
momentum) associated with the second index.
`L_cart_2` is the number of Cartesian contractions for the given angular momentum
associated with the second index.
construct_array_cartesian(self) : np.ndarray(K_cart, K_cart, 3)
Return the integral over the momentum operator associated with Cartesian Gaussians.
`K_cart` is the total number of Cartesian contractions within the instance.
construct_array_spherical(self) : np.ndarray(K_sph, K_sph, 3)
Return the integral over the momentum operator associated with spherical Gaussians (atomic
orbitals).
`K_sph` is the total number of spherical contractions within the instance.
construct_array_mix(self, coord_types) : np.ndarray(K_cont, K_cont, 3)
Return the integral over the momentum operator associated with the contraction in the given
coordinate system.
`K_cont` is the total number of contractions within the given basis set.
construct_array_spherical_lincomb(self, transform) : np.ndarray(K_orbs, K_orbs, 3)
Return the integral over the momentum operator associated with linear combinations of
spherical Gaussians (linear combinations of atomic orbitals).
`K_orbs` is the number of basis functions produced after the linear combinations. | 62599048b830903b9686ee46 |
class player(object): <NEW_LINE> <INDENT> name = '' <NEW_LINE> _account = 0 <NEW_LINE> _bet = {} <NEW_LINE> strategy = None <NEW_LINE> def __init__(self, player_name, player_account, player_strategy): <NEW_LINE> <INDENT> self.reset() <NEW_LINE> self.name = player_name <NEW_LINE> self._account = player_account <NEW_LINE> self.strategy = player_strategy <NEW_LINE> self._bet = copy.deepcopy(player_strategy.get_first_bet(self)) <NEW_LINE> self.account -= self.strategy.get_bet_sum() <NEW_LINE> <DEDENT> def get_account(self): <NEW_LINE> <INDENT> return self._account <NEW_LINE> <DEDENT> def set_account(self, value): <NEW_LINE> <INDENT> self._account = value <NEW_LINE> report_generator.log_player_account_change(value, self.account) <NEW_LINE> <DEDENT> def get_bet(self): <NEW_LINE> <INDENT> return self._bet <NEW_LINE> <DEDENT> def set_bet(self, value): <NEW_LINE> <INDENT> self._bet = value <NEW_LINE> report_generator.log_player_bet_change(value) <NEW_LINE> <DEDENT> account = property(get_account, set_account) <NEW_LINE> bet = property(get_bet, set_bet) <NEW_LINE> def reset(self): <NEW_LINE> <INDENT> self.name = '' <NEW_LINE> self._account = 0 <NEW_LINE> self.strategy = None <NEW_LINE> <DEDENT> def get_next_bet(self): <NEW_LINE> <INDENT> self.bet = copy.deepcopy(self.strategy.get_bet(self)) <NEW_LINE> self.account -= self.strategy.get_bet_sum() <NEW_LINE> <DEDENT> def set_payment(self, payment): <NEW_LINE> <INDENT> self.account += payment <NEW_LINE> <DEDENT> def get_name(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def get_bet(self): <NEW_LINE> <INDENT> return self.bet | Both play and croupier has a state
play: betting, no more bets | 6259904850485f2cf55dc31f |
class MD5PasswordHasher(BasePasswordHasher): <NEW_LINE> <INDENT> algorithm = "md5" <NEW_LINE> @password_max_length(MAXIMUM_PASSWORD_LENGTH) <NEW_LINE> def encode(self, password, salt): <NEW_LINE> <INDENT> assert password is not None <NEW_LINE> assert salt and '$' not in salt <NEW_LINE> hash = hashlib.md5(force_bytes(salt + password)).hexdigest() <NEW_LINE> return "%s$%s$%s" % (self.algorithm, salt, hash) <NEW_LINE> <DEDENT> @password_max_length(MAXIMUM_PASSWORD_LENGTH) <NEW_LINE> def verify(self, password, encoded): <NEW_LINE> <INDENT> algorithm, salt, hash = encoded.split('$', 2) <NEW_LINE> assert algorithm == self.algorithm <NEW_LINE> encoded_2 = self.encode(password, salt) <NEW_LINE> return constant_time_compare(encoded, encoded_2) <NEW_LINE> <DEDENT> def safe_summary(self, encoded): <NEW_LINE> <INDENT> algorithm, salt, hash = encoded.split('$', 2) <NEW_LINE> assert algorithm == self.algorithm <NEW_LINE> return OrderedDict([ (_('algorithm'), algorithm), (_('salt'), mask_hash(salt, show=2)), (_('hash'), mask_hash(hash)), ]) | The Salted MD5 password hashing algorithm (not recommended) | 6259904876d4e153a661dc41 |
class CoordinateTransformer: <NEW_LINE> <INDENT> def __init__( self, angles_in_degrees=True, nexus_coords=None, origin=np.array([0.0, 0.0, 0.0]), ): <NEW_LINE> <INDENT> self.angles_in_degrees = angles_in_degrees <NEW_LINE> if nexus_coords is None: <NEW_LINE> <INDENT> nexus_coords = ["x", "y", "z"] <NEW_LINE> <DEDENT> self.default_coords = nexus_coords == ["x", "y", "z"] <NEW_LINE> self.nexus_coords_signs = np.array( [-1.0 if self.__is_negative(axis) else 1.0 for axis in nexus_coords] ).astype(float) <NEW_LINE> unsigned_nexus_coords = [ coord[1:] if self.__is_negative(coord) else coord for coord in nexus_coords ] <NEW_LINE> self.nexus_coords_order = np.array( [ unsigned_nexus_coords.index("x"), unsigned_nexus_coords.index("y"), unsigned_nexus_coords.index("z"), ] ) <NEW_LINE> self.origin = np.array(origin) <NEW_LINE> <DEDENT> def get_angle_in_degrees(self, angle): <NEW_LINE> <INDENT> return angle if self.angles_in_degrees else np.rad2deg(angle) <NEW_LINE> <DEDENT> def __make_relative_to_origin(self, vector, top_level): <NEW_LINE> <INDENT> if top_level: <NEW_LINE> <INDENT> return vector - self.origin <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return vector <NEW_LINE> <DEDENT> <DEDENT> def get_nexus_coordinates(self, vector, top_level=False): <NEW_LINE> <INDENT> vector = np.array(vector) <NEW_LINE> if self.default_coords: <NEW_LINE> <INDENT> vector = self.__make_relative_to_origin(vector, top_level) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> vector = np.multiply(vector, self.nexus_coords_signs) <NEW_LINE> vector = vector[self.nexus_coords_order] <NEW_LINE> vector = self.__make_relative_to_origin(vector, top_level) <NEW_LINE> <DEDENT> return vector <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def __is_negative(direction): <NEW_LINE> <INDENT> return direction[0] == "-" <NEW_LINE> <DEDENT> def spherical_to_cartesian(self, rthetaphi): <NEW_LINE> <INDENT> r = rthetaphi[0] <NEW_LINE> theta = rthetaphi[1] <NEW_LINE> phi = rthetaphi[2] <NEW_LINE> if self.angles_in_degrees: <NEW_LINE> <INDENT> theta = np.deg2rad(theta) <NEW_LINE> phi = np.deg2rad(phi) <NEW_LINE> <DEDENT> x = r * np.sin(theta) * np.cos(phi) <NEW_LINE> y = r * np.sin(theta) * np.sin(phi) <NEW_LINE> z = r * np.cos(theta) <NEW_LINE> return [x, y, z] <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def cartesian_to_spherical(xyz): <NEW_LINE> <INDENT> x = xyz[0] <NEW_LINE> y = xyz[1] <NEW_LINE> z = xyz[2] <NEW_LINE> r = np.sqrt(x * x + y * y + z * z) <NEW_LINE> theta = np.arccos(z / r) * 180 / np.pi <NEW_LINE> phi = np.arctan2(y, x) * 180 / np.pi <NEW_LINE> return [r, theta, phi] | Transform between IDF and NeXus, units and coordinates | 62599048462c4b4f79dbcd96 |
class TestValidateRelease(BasePyTestCase): <NEW_LINE> <INDENT> def test_invalid(self): <NEW_LINE> <INDENT> request = mock.Mock() <NEW_LINE> request.db = self.db <NEW_LINE> request.errors = Errors() <NEW_LINE> request.validated = {'release': 'invalid'} <NEW_LINE> validators.validate_release(request) <NEW_LINE> assert request.errors == [ {'location': 'querystring', 'name': 'release', 'description': 'Invalid release specified: invalid'} ] <NEW_LINE> assert request.errors.status == exceptions.HTTPBadRequest.code | Test the validate_release() function. | 6259904873bcbd0ca4bcb624 |
class GroupBaseAdmin(admin.ModelAdmin): <NEW_LINE> <INDENT> save_on_top = True <NEW_LINE> search_fields = ['name', 'aliases__name', 'url', 'aliases__url'] <NEW_LINE> list_display = ['name', 'member_count', 'vouched_member_count'] <NEW_LINE> list_display_links = ['name'] <NEW_LINE> list_filter = [EmptyGroupFilter] <NEW_LINE> readonly_fields = ['url'] <NEW_LINE> def get_form(self, request, obj=None, **kwargs): <NEW_LINE> <INDENT> defaults = {} <NEW_LINE> if obj is None: <NEW_LINE> <INDENT> defaults['form'] = self.add_form <NEW_LINE> <DEDENT> defaults.update(kwargs) <NEW_LINE> return super(GroupBaseAdmin, self).get_form(request, obj, **defaults) <NEW_LINE> <DEDENT> def queryset(self, request): <NEW_LINE> <INDENT> return (super(GroupBaseAdmin, self) .queryset(request) .annotate(member_count=Count('members'), vouched_member_count=Sum('members__is_vouched'))) <NEW_LINE> <DEDENT> def member_count(self, obj): <NEW_LINE> <INDENT> return obj.member_count <NEW_LINE> <DEDENT> member_count.admin_order_field = 'member_count' <NEW_LINE> def vouched_member_count(self, obj): <NEW_LINE> <INDENT> if obj.vouched_member_count: <NEW_LINE> <INDENT> return int(obj.vouched_member_count) <NEW_LINE> <DEDENT> return 0 <NEW_LINE> <DEDENT> vouched_member_count.admin_order_field = 'vouched_member_count' | GroupBase Admin. | 6259904829b78933be26aa8e |
class Meta: <NEW_LINE> <INDENT> verbose_name = 'Project Master' <NEW_LINE> verbose_name_plural = 'Project Masters' | Metadata for class ProjectMaster | 62599048d10714528d69f059 |
class CalendarDatePicker(FormField): <NEW_LINE> <INDENT> template = "tw2.forms.templates.calendar" <NEW_LINE> calendar_lang = twc.Param("Default Language to use in the Calendar", default='en') <NEW_LINE> not_empty = twc.Param("Allow this field to be empty", default=True) <NEW_LINE> button_text = twc.Param("Text to display on Button", default="Choose") <NEW_LINE> date_format = twc.Param("Date Display Format", default="%m/%d/%Y") <NEW_LINE> picker_shows_time = twc.Param('Picker Shows Time', default=False) <NEW_LINE> tzinfo = twc.Param('Time Zone Information', default=None) <NEW_LINE> setup_options = twc.Param('Calendar.setup(...) options', default={}) <NEW_LINE> default = twc.Param( 'Default value (datetime) for the widget. If set to a function, ' + 'it will be called each time before displaying.', default=datetime.now) <NEW_LINE> def get_calendar_lang_file_link(self, lang): <NEW_LINE> <INDENT> fname = 'static/calendar/lang/calendar-%s.js' % lang.lower() <NEW_LINE> return twc.JSLink(modname='tw2.forms', filename=fname) <NEW_LINE> <DEDENT> def __init__(self, *args, **kw): <NEW_LINE> <INDENT> if self.validator is None: <NEW_LINE> <INDENT> self.validator = twc.DateTimeValidator( format=self.date_format, ) <NEW_LINE> <DEDENT> super(CalendarDatePicker, self).__init__(*args, **kw) <NEW_LINE> <DEDENT> def prepare(self): <NEW_LINE> <INDENT> super(CalendarDatePicker, self).prepare() <NEW_LINE> self.resources = [calendar_css, calendar_js, calendar_setup] <NEW_LINE> if not self.value: <NEW_LINE> <INDENT> if callable(self.default): <NEW_LINE> <INDENT> self.value = self.default() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.value = self.default <NEW_LINE> <DEDENT> <DEDENT> try: <NEW_LINE> <INDENT> self.strdate = self.value.strftime(self.date_format) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> self.strdate = self.value <NEW_LINE> <DEDENT> self.resources.append( self.get_calendar_lang_file_link(self.calendar_lang) ) | Uses a javascript calendar system to allow picking of calendar dates.
The date_format is in mm/dd/yyyy unless otherwise specified | 62599048d53ae8145f9197f8 |
class gitpostapplypatchinputparser(basegitinputparser): <NEW_LINE> <INDENT> def parse(self): <NEW_LINE> <INDENT> resolver = gitinforesolver() <NEW_LINE> return resolver | Input parser for the 'post-applypatch' phase
Available fields:
- reporoot (str) => root of the repo
- head (str) => sha1 of HEAD | 6259904807f4c71912bb07ca |
class BaseFormat(object): <NEW_LINE> <INDENT> def __init__(self, width=100, pagesize=1000): <NEW_LINE> <INDENT> self.width = width <NEW_LINE> self.pagesize = pagesize <NEW_LINE> <DEDENT> def write(self, results, ostream): <NEW_LINE> <INDENT> count = 0 <NEW_LINE> for result in results: <NEW_LINE> <INDENT> self.format(result, ostream) <NEW_LINE> count += 1 <NEW_LINE> if count > self.pagesize: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> def format(self, result, ostream): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def format_field(self, field): <NEW_LINE> <INDENT> if isinstance(field, Decimal): <NEW_LINE> <INDENT> if field % 1 == 0: <NEW_LINE> <INDENT> return unicode(int(field)) <NEW_LINE> <DEDENT> return unicode(float(field)) <NEW_LINE> <DEDENT> pretty = repr(field) <NEW_LINE> if pretty.startswith("u'"): <NEW_LINE> <INDENT> return pretty[1:] <NEW_LINE> <DEDENT> return pretty | Base class for formatters | 6259904826238365f5fadef4 |
class TestReviewFileConversions(unittest.TestCase): <NEW_LINE> <INDENT> layer = RECENSIO_INTEGRATION_TESTING <NEW_LINE> level = 10 <NEW_LINE> def test_nothing(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def xxxtest_review_with_custom_pdf_files(self): <NEW_LINE> <INDENT> portal = self.layer["portal"] <NEW_LINE> pm = portal.portal_membership <NEW_LINE> fake_member_folder = pm.getMembersFolder().get("fake_member") <NEW_LINE> online_resource = fake_member_folder.listFolderContents( contentFilter={"portal_type": "Presentation Online Resource"} )[0] <NEW_LINE> self.assertTrue( len(online_resource.pagePictures) > 0 and len(online_resource.pagePictures[0]) > 1000, msg=( "Presentation Online Resource: %s " "The generated images for previewing the " "online resource haven't been generated" % (online_resource.absolute_url()) ), ) <NEW_LINE> issue = portal["sample-reviews"].newspaperb.summer["issue-2"] <NEW_LINE> review_id = issue.objectIds()[0] <NEW_LINE> review = issue[review_id] <NEW_LINE> self.assertTrue( review.pdf.get_size() > 0, msg=( "Review: %s " "doesn't have a pdf file attached." % review.absolute_url() ), ) <NEW_LINE> self.assertTrue( review.doc.get_size() > 0, msg=( "Review: %s " "doesn't have a doc file attached." % review.absolute_url() ), ) <NEW_LINE> self.assertFalse( hasattr(review, "generatedPdf"), msg=( "Review: %s " "A pdf has been generated, even though this " "review has a custom pdf (this is wrong)." % review.absolute_url() ), ) <NEW_LINE> self.assertTrue( review.pdf.blob == review.get_review_pdf()["blob"], msg=( "Review: %s " "get_review_pdf doesn't return the custom pdf." % review.absolute_url() ), ) <NEW_LINE> self.assertTrue( len(review.pagePictures) > 0 and len(review.pagePictures[0]) > 1000, msg=( "Review: %s " "The generated images for previewing the review " "haven't been generated" % (review.absolute_url()) ), ) <NEW_LINE> review.setPdf(None) <NEW_LINE> self.assertTrue( review.getPdf().get_size() == 0, msg=( "Review: %s " "still has an attached custom pdf file, " "this should have been removed." % review.absolute_url() ), ) <NEW_LINE> request = makerequest.makerequest(review) <NEW_LINE> event = ObjectEditedEvent(review, request) <NEW_LINE> zope.event.notify(event) <NEW_LINE> self.assertTrue( hasattr(review, "generatedPdf"), msg=( "Review: %s " "A pdf has not successfully been generated." % review.absolute_url() ), ) <NEW_LINE> self.assertTrue( review.generatedPdf == review.get_review_pdf()["blob"], msg=( "Review: %s get_review_pdf " "is not returning the correct pdf." % review.absolute_url() ), ) <NEW_LINE> review.setDoc(None) <NEW_LINE> self.assertTrue( review.getDoc().get_size() == 0, msg=( "Review: %s " "The attached doc file has not been " "successfully removed." % review.absolute_url() ), ) <NEW_LINE> request = makerequest.makerequest(review) <NEW_LINE> event = ObjectEditedEvent(review, request) <NEW_LINE> zope.event.notify(event) | Test file conversions.
Test the file conversions (Word/HTML->PDF) that take place
when a Review is added or edited. | 62599048d99f1b3c44d06a32 |
class IsOwnerOrAdmin(permissions.BasePermission): <NEW_LINE> <INDENT> def has_permission(self, request, view): <NEW_LINE> <INDENT> if "user" in request.data: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> user = User.objects.get(username=request.data["user"]) <NEW_LINE> return ( user == request.user or request.user.is_superuser or request.user.role in [r.value for r in [UserRoles.ADMIN]] ) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> def has_object_permission(self, request, view, obj): <NEW_LINE> <INDENT> if bool( request.user and ( request.user.is_superuser or request.user.role in [r.value for r in [UserRoles.ADMIN]] ) ): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return obj.user == request.user | Object-level permission to only allow owners of an object to edit it.
Assumes the model instance has an `owner` attribute. | 625990488a43f66fc4bf352d |
class myPlot: <NEW_LINE> <INDENT> def __init__(self, ax, xlabel='', ylabel='', title='', legend=None): <NEW_LINE> <INDENT> self.legend = legend <NEW_LINE> self.ax = ax <NEW_LINE> self.colors = ['b', 'g', 'r', 'c', 'm', 'y', 'b'] <NEW_LINE> self.line_styles = ['-', '-', '--', '-.', ':'] <NEW_LINE> self.line = [] <NEW_LINE> self.ax.set_ylabel(ylabel) <NEW_LINE> self.ax.set_xlabel(xlabel) <NEW_LINE> self.ax.set_title(title) <NEW_LINE> self.ax.grid(True) <NEW_LINE> self.init = True <NEW_LINE> <DEDENT> def updatePlot(self, time, data): <NEW_LINE> <INDENT> if self.init == True: <NEW_LINE> <INDENT> for i in range(len(data)): <NEW_LINE> <INDENT> self.line.append(Line2D(time, data[i], color=self.colors[np.mod(i, len(self.colors) - 1)], ls=self.line_styles[np.mod(i, len(self.line_styles) - 1)], label=self.legend if self.legend != None else None)) <NEW_LINE> self.ax.add_line(self.line[i]) <NEW_LINE> <DEDENT> self.init = False <NEW_LINE> if self.legend != None: <NEW_LINE> <INDENT> plt.legend(handles=self.line) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> for i in range(len(self.line)): <NEW_LINE> <INDENT> self.line[i].set_xdata(time) <NEW_LINE> self.line[i].set_ydata(data[i]) <NEW_LINE> <DEDENT> <DEDENT> self.ax.relim() <NEW_LINE> self.ax.autoscale() | Create each individual subplot. | 6259904845492302aabfd86a |
class GraphListWidget(iKeyListenerWidget): <NEW_LINE> <INDENT> def __init__(self, parent): <NEW_LINE> <INDENT> super().__init__(parent) <NEW_LINE> self._next_id = 0 <NEW_LINE> self.graphs = dict() <NEW_LINE> self.box = QVBoxLayout() <NEW_LINE> self.setLayout(self.box) <NEW_LINE> <DEDENT> def add_graph_plot_instance(self): <NEW_LINE> <INDENT> current_id = self.next_id <NEW_LINE> graph_with_menu = PlotWithCommands(self, current_id) <NEW_LINE> self.graphs[current_id] = graph_with_menu <NEW_LINE> graph_with_menu.graph_widget.multi_plot([np.random.random(100)]) <NEW_LINE> graph_with_menu.btn_remove_plot.clicked.connect(lambda: self.remove_plot(graph_with_menu)) <NEW_LINE> self.box.addWidget(graph_with_menu) <NEW_LINE> self.setLayout(self.box) <NEW_LINE> <DEDENT> @property <NEW_LINE> def next_id(self): <NEW_LINE> <INDENT> _id = self._next_id <NEW_LINE> self._next_id += 1 <NEW_LINE> return _id <NEW_LINE> <DEDENT> def remove_plot(self, graph_with_menu): <NEW_LINE> <INDENT> del self.graphs[graph_with_menu._id] <NEW_LINE> self.box.removeWidget(graph_with_menu) <NEW_LINE> sip.delete(graph_with_menu) <NEW_LINE> graph_with_menu = None <NEW_LINE> <DEDENT> def regather_graph_data(self): <NEW_LINE> <INDENT> for graph in self.graphs.values(): <NEW_LINE> <INDENT> data = [np.random.random(100)] <NEW_LINE> graph.graph_widget.multi_plot(data) | A widget for managing many different graphs | 6259904845492302aabfd86b |
class OpenAIGPTEmbedderModule(HuggingfaceTransformersEmbedderModule): <NEW_LINE> <INDENT> def __init__(self, args): <NEW_LINE> <INDENT> super(OpenAIGPTEmbedderModule, self).__init__(args) <NEW_LINE> self.model = transformers.OpenAIGPTModel.from_pretrained( args.input_module, cache_dir=self.cache_dir, output_hidden_states=True ) <NEW_LINE> self.max_pos = self.model.config.n_positions <NEW_LINE> self.tokenizer = transformers.OpenAIGPTTokenizer.from_pretrained( args.input_module, cache_dir=self.cache_dir ) <NEW_LINE> self._pad_id = self.tokenizer.convert_tokens_to_ids("\n</w>") <NEW_LINE> self._unk_id = self.tokenizer.convert_tokens_to_ids("<unk>") <NEW_LINE> special_tokens = {"bos_token": "<start>", "sep_token": "<delim>", "cls_token": "<extract>"} <NEW_LINE> self.tokenizer.add_special_tokens(special_tokens) <NEW_LINE> self.model.resize_token_embeddings(len(self.tokenizer)) <NEW_LINE> self.parameter_setup(args) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def apply_boundary_tokens(s1, s2=None, get_offset=False): <NEW_LINE> <INDENT> if s2: <NEW_LINE> <INDENT> s = ["<start>"] + s1 + ["<delim>"] + s2 + ["<extract>"] <NEW_LINE> if get_offset: <NEW_LINE> <INDENT> return s, 1, len(s1) + 2 <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> s = ["<start>"] + s1 + ["<extract>"] <NEW_LINE> if get_offset: <NEW_LINE> <INDENT> return s, 1 <NEW_LINE> <DEDENT> <DEDENT> return s <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def apply_lm_boundary_tokens(s1, get_offset=False): <NEW_LINE> <INDENT> s = ["\n</w>"] + s1 + ["\n</w>"] <NEW_LINE> if get_offset: <NEW_LINE> <INDENT> return s, 1 <NEW_LINE> <DEDENT> return s <NEW_LINE> <DEDENT> def forward(self, sent: Dict[str, torch.LongTensor], task_name: str = "") -> torch.FloatTensor: <NEW_LINE> <INDENT> ids, input_mask = self.correct_sent_indexing(sent) <NEW_LINE> hidden_states, lex_seq = [], None <NEW_LINE> if self.output_mode not in ["none", "top"]: <NEW_LINE> <INDENT> lex_seq = self.model.tokens_embed(ids) <NEW_LINE> <DEDENT> if self.output_mode != "only": <NEW_LINE> <INDENT> _, hidden_states = self.model(ids) <NEW_LINE> <DEDENT> return self.prepare_output(lex_seq, hidden_states, input_mask) <NEW_LINE> <DEDENT> def get_pretrained_lm_head(self, args): <NEW_LINE> <INDENT> model_with_lm_head = transformers.OpenAIGPTLMHeadModel.from_pretrained( self.input_module, cache_dir=self.cache_dir ) <NEW_LINE> lm_head = model_with_lm_head.lm_head <NEW_LINE> lm_head.weight = self.model.tokens_embed.weight[: lm_head.weight.size()[0]] <NEW_LINE> return nn.Sequential(lm_head, nn.LogSoftmax(dim=-1)) | Wrapper for OpenAI GPT module to fit into jiant APIs.
Check HuggingfaceTransformersEmbedderModule for function definitions | 6259904891af0d3eaad3b1bd |
class PumpControlPi(PumpControl): <NEW_LINE> <INDENT> def __init__(self, gpioId): <NEW_LINE> <INDENT> super(PumpControlPi,self).__init__(gpioId) <NEW_LINE> GPIO.setmode(GPIO.BCM) <NEW_LINE> GPIO.setup(gpioId, GPIO.OUT) <NEW_LINE> GPIO.output(gpioId, 0) <NEW_LINE> <DEDENT> def runPump(self, timeWait): <NEW_LINE> <INDENT> GPIO.output(self.gpioId, 1) <NEW_LINE> time.sleep(timeWait) <NEW_LINE> GPIO.output(self.gpioId, 0) | Starts pump for time range | 6259904824f1403a92686299 |
class Dense(Layer): <NEW_LINE> <INDENT> def __init__(self, n_neurons: int, activation: Operation = Sigmoid()): <NEW_LINE> <INDENT> super().__init__(n_neurons) <NEW_LINE> self.activation = activation <NEW_LINE> <DEDENT> def _setup_layer(self, input_: np.ndarray): <NEW_LINE> <INDENT> if self.seed: <NEW_LINE> <INDENT> np.random.seed(self.seed) <NEW_LINE> <DEDENT> self.params = [] <NEW_LINE> self.params.append(np.random.randn(input_.shape[1], self.n_neurons)) <NEW_LINE> self.params.append(np.random.randn(1, self.n_neurons)) <NEW_LINE> self.operations = [WeightTransform(self.params[0]), BiasAdd(self.params[1]), self.activation] | Fully connected layer | 62599048711fe17d825e166a |
class UserDb: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._creds = {} <NEW_LINE> <DEDENT> def add(self, authid, authrole, secret, salt = None): <NEW_LINE> <INDENT> if salt: <NEW_LINE> <INDENT> key = auth.derive_key(secret, salt) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> key = secret <NEW_LINE> <DEDENT> self._creds[authid] = (salt, key, authrole) <NEW_LINE> return self._creds[authid] <NEW_LINE> <DEDENT> def get(self, authid): <NEW_LINE> <INDENT> return defer.succeed(self._creds.get(authid, (None, None, None))) | A fake user database. | 62599048379a373c97d9a3c3 |
class DataLoader(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.availabe_datasets = { 'H2_lique': DataSetH2Lique, 'HD_lipovka': DataSetHDLipovka, 'H2_wrathmall': DataSetH2Wrathmall, 'H2_low_energy_levels': DataSetH2Glover, 'two_level_1': DataSetTwoLevel_1, 'three_level_1': DataSetThreeLevel_1, 'HD_galileo_project': DataSetHDGalileoProject, 'HeH2': DataSetHeH2 } <NEW_LINE> <DEDENT> def load(self, name): <NEW_LINE> <INDENT> retval = self.availabe_datasets.get(name)() <NEW_LINE> if retval is None: <NEW_LINE> <INDENT> msg = 'not data loader defined for {}'.format(name) <NEW_LINE> raise ValueError(msg) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return retval | Load various data sets. | 62599048a79ad1619776b419 |
class Node: <NEW_LINE> <INDENT> def __init__(self, value, distanceL=0, right=0, distanceR = 0, left=0): <NEW_LINE> <INDENT> self.value = value <NEW_LINE> self.distanceL = distanceL <NEW_LINE> self.distanceR = distanceR <NEW_LINE> self.left = left <NEW_LINE> self.right = right <NEW_LINE> <DEDENT> def set_right(self, nodeR): <NEW_LINE> <INDENT> self.right = nodeR <NEW_LINE> nodeR.left = self <NEW_LINE> <DEDENT> def set_left(self, nodeL): <NEW_LINE> <INDENT> self.left = nodeL <NEW_LINE> nodeL.right = self <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return str(self.value) | Node Class | 6259904829b78933be26aa8f |
class Main: <NEW_LINE> <INDENT> def __init__(self) -> None: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.config() <NEW_LINE> sys.exit(self.run()) <NEW_LINE> <DEDENT> except (EOFError, KeyboardInterrupt): <NEW_LINE> <INDENT> sys.exit(114) <NEW_LINE> <DEDENT> except SystemExit as exception: <NEW_LINE> <INDENT> sys.exit(exception) <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def config() -> None: <NEW_LINE> <INDENT> if hasattr(signal, 'SIGPIPE'): <NEW_LINE> <INDENT> signal.signal(signal.SIGPIPE, signal.SIG_DFL) <NEW_LINE> <DEDENT> if os.name == 'nt': <NEW_LINE> <INDENT> argv = [] <NEW_LINE> for arg in sys.argv: <NEW_LINE> <INDENT> files = glob.glob(arg) <NEW_LINE> if files: <NEW_LINE> <INDENT> argv.extend(files) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> argv.append(arg) <NEW_LINE> <DEDENT> <DEDENT> sys.argv = argv <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def _create(file: str, link: str) -> None: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> with open(link, 'w', encoding='utf-8', newline='\n') as ofile: <NEW_LINE> <INDENT> print("#!/usr/bin/env bash", file=ofile) <NEW_LINE> print("#", file=ofile) <NEW_LINE> print("# fwrapper.py generated script", file=ofile) <NEW_LINE> print("#\n", file=ofile) <NEW_LINE> print('MYDIR=$(dirname "$0")', file=ofile) <NEW_LINE> if file == os.path.abspath(file): <NEW_LINE> <INDENT> directory = os.path.dirname(file) <NEW_LINE> print( 'PATH=$(echo "$PATH" | ' f'sed -e "s@$MYDIR@{directory}@")', file=ofile, ) <NEW_LINE> print("export PATH\n", file=ofile) <NEW_LINE> print(f'exec "{file}" "$@"', file=ofile) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print(f'exec "$MYDIR/{file}" "$@"', file=ofile) <NEW_LINE> <DEDENT> <DEDENT> os.chmod(link, int('755', 8)) <NEW_LINE> file_time = os.path.getmtime(file) <NEW_LINE> os.utime(link, (file_time, file_time)) <NEW_LINE> <DEDENT> except OSError as exception: <NEW_LINE> <INDENT> raise SystemExit( f'{sys.argv[0]}: Cannot create "{link}" wrapper file.', ) from exception <NEW_LINE> <DEDENT> <DEDENT> def run(self) -> int: <NEW_LINE> <INDENT> options = Options() <NEW_LINE> self._files = options.get_files() <NEW_LINE> for file in self._files: <NEW_LINE> <INDENT> if not os.path.isfile(file): <NEW_LINE> <INDENT> raise SystemExit( f'{sys.argv[0]}: Cannot find "{file}" file.', ) <NEW_LINE> <DEDENT> link = os.path.basename(file) <NEW_LINE> if os.path.exists(link): <NEW_LINE> <INDENT> print(f'Updating "{link}" wrapper for "{file}"...') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print(f'Creating "{link}" wrapper for "{file}"...') <NEW_LINE> <DEDENT> self._create(file, link) <NEW_LINE> <DEDENT> return 0 | Main class | 625990480a366e3fb87ddd7f |
class Text(six.text_type): <NEW_LINE> <INDENT> def __new__(cls, arg=None, encoding=None): <NEW_LINE> <INDENT> if arg is None: <NEW_LINE> <INDENT> arg = u'' <NEW_LINE> <DEDENT> if isinstance(arg, six.text_type): <NEW_LINE> <INDENT> if encoding is not None: <NEW_LINE> <INDENT> raise TypeError('Text() with a unicode argument ' 'should not specify an encoding') <NEW_LINE> <DEDENT> return super(Text, cls).__new__(cls, arg) <NEW_LINE> <DEDENT> if isinstance(arg, bytes): <NEW_LINE> <INDENT> if encoding is None: <NEW_LINE> <INDENT> encoding = 'ascii' <NEW_LINE> <DEDENT> return super(Text, cls).__new__(cls, arg, encoding) <NEW_LINE> <DEDENT> raise TypeError('Text() argument should be str or unicode, not %s' % type(arg).__name__) | A long string type.
Strings of any length can be stored in the datastore using this
type. It behaves identically to the Python unicode type, except for
the constructor, which only accepts str and unicode arguments. | 6259904896565a6dacd2d956 |
class Hobby(atom.core.XmlElement): <NEW_LINE> <INDENT> _qname = CONTACTS_TEMPLATE % 'hobby' | Describes an ID of the contact in an external system of some kind.
This element may be repeated. | 62599048435de62698e9d19f |
class IntegrationTestCase(TestCase): <NEW_LINE> <INDENT> def _flash(self): <NEW_LINE> <INDENT> return self.response.context[CONTEXT_VAR] <NEW_LINE> <DEDENT> def test_session_state_for_unused_flash(self): <NEW_LINE> <INDENT> self.response = self.client.get(reverse(views.render_template)) <NEW_LINE> self.assertFalse(_SESSION_KEY in self.client.session) <NEW_LINE> <DEDENT> def test_session_state_for_used_flash(self): <NEW_LINE> <INDENT> self.response = self.client.get(reverse(views.set_flash_var)) <NEW_LINE> self.response = self.client.get(reverse(views.render_template)) <NEW_LINE> self.assertTrue(_SESSION_KEY in self.client.session) <NEW_LINE> self.response = self.client.get(reverse(views.render_template)) <NEW_LINE> self.assertFalse(_SESSION_KEY in self.client.session) <NEW_LINE> <DEDENT> def test_default_lifecycle(self): <NEW_LINE> <INDENT> self.response = self.client.get(reverse(views.set_flash_var)) <NEW_LINE> self.assertEqual('Message', self._flash()['message']) <NEW_LINE> self.response = self.client.get(reverse(views.render_template)) <NEW_LINE> self.assertEqual('Message', self._flash()['message']) <NEW_LINE> self.response = self.client.get(reverse(views.render_template)) <NEW_LINE> self.assertFalse('message' in self._flash()) <NEW_LINE> <DEDENT> def test_keep_lifecycle(self): <NEW_LINE> <INDENT> self.response = self.client.get(reverse(views.set_flash_var)) <NEW_LINE> self.assertEqual('Message', self._flash()['message']) <NEW_LINE> self.response = self.client.get(reverse(views.keep_var)) <NEW_LINE> self.assertEqual('Message', self._flash()['message']) <NEW_LINE> self.response = self.client.get(reverse(views.render_template)) <NEW_LINE> self.assertEqual('Message', self._flash()['message']) <NEW_LINE> self.response = self.client.get(reverse(views.render_template)) <NEW_LINE> self.assertFalse('message' in self._flash()) <NEW_LINE> <DEDENT> def test_now_lifecycle(self): <NEW_LINE> <INDENT> self.response = self.client.get(reverse(views.set_now_var)) <NEW_LINE> self.assertEqual('Message', self._flash()['message']) <NEW_LINE> self.response = self.client.get(reverse(views.render_template)) <NEW_LINE> self.assertFalse('message' in self._flash()) <NEW_LINE> <DEDENT> def test_discard_lifecycle(self): <NEW_LINE> <INDENT> self.response = self.client.get(reverse(views.discard_var)) <NEW_LINE> self.assertEqual('Message', self._flash()['message']) <NEW_LINE> self.response = self.client.get(reverse(views.render_template)) <NEW_LINE> self.assertFalse('message' in self._flash()) <NEW_LINE> <DEDENT> def test_multiple_variables_lifecycle(self): <NEW_LINE> <INDENT> self.response = self.client.get(reverse(views.set_flash_var)) <NEW_LINE> self.assertEqual('Message', self._flash()['message']) <NEW_LINE> self.response = self.client.get(reverse(views.set_another_flash_var)) <NEW_LINE> self.assertEqual('Message', self._flash()['message']) <NEW_LINE> self.assertEqual('Another message', self._flash()['anotherMessage']) <NEW_LINE> self.response = self.client.get(reverse(views.render_template)) <NEW_LINE> self.assertFalse('message' in self._flash()) <NEW_LINE> self.assertEqual('Another message', self._flash()['anotherMessage']) <NEW_LINE> self.response = self.client.get(reverse(views.render_template)) <NEW_LINE> self.assertFalse('message' in self._flash()) <NEW_LINE> self.assertFalse('anotherMessage' in self._flash()) <NEW_LINE> <DEDENT> def test_replace_flash_scope(self): <NEW_LINE> <INDENT> request = lambda: self.client.get(reverse(views.replace_flash)) <NEW_LINE> self.assertRaises(TypeError, request) | Test the 
middleware and the context processors working within a real
Django application. | 62599048507cdc57c63a6137 |
class PyHttpx(PythonPackage): <NEW_LINE> <INDENT> homepage = "https://github.com/encode/httpx" <NEW_LINE> pypi = "httpx/httpx-0.15.2.tar.gz" <NEW_LINE> version('0.15.2', sha256='713a2deaf96d85bbd4a1fbdf0edb27d6b4ee2c9aaeda8433042367e4b9e1628d') <NEW_LINE> version('0.11.1', sha256='7d2bfb726eeed717953d15dddb22da9c2fcf48a4d70ba1456aa0a7faeda33cf7') <NEW_LINE> depends_on('py-setuptools', type='build') <NEW_LINE> depends_on('py-wheel', type='build') <NEW_LINE> depends_on('py-certifi') <NEW_LINE> depends_on('py-sniffio') <NEW_LINE> depends_on('[email protected]:') <NEW_LINE> depends_on('[email protected]:1.99') | HTTPX is a fully featured HTTP client for Python 3, which provides sync
and async APIs, and support for both HTTP/1.1 and HTTP/2. | 625990483cc13d1c6d466ad1 |
class SharedPoolGroup(A10BaseClass): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.ERROR_MSG = "" <NEW_LINE> self.required=[] <NEW_LINE> self.b_key = "shared-pool-group" <NEW_LINE> self.a10_url="/axapi/v3/cgnv6/one-to-one/shared-pool-group/oper" <NEW_LINE> self.DeviceProxy = "" <NEW_LINE> self.oper = {} <NEW_LINE> for keys, value in kwargs.items(): <NEW_LINE> <INDENT> setattr(self,keys, value) | Class Description::
Operational Status for the object shared-pool-group.
Class shared-pool-group supports CRUD Operations and inherits from `common/A10BaseClass`.
This class is the `"PARENT"` class for this module.`
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
URL for this object::
`https://<Hostname|Ip address>//axapi/v3/cgnv6/one-to-one/shared-pool-group/oper`. | 62599048be383301e0254bb3 |
class MrvOptiswitchSSH(CiscoSSHConnection): <NEW_LINE> <INDENT> def session_preparation(self): <NEW_LINE> <INDENT> self._test_channel_read(pattern=r"[>#]") <NEW_LINE> self.set_base_prompt() <NEW_LINE> self.enable() <NEW_LINE> self.disable_paging(command="no cli-paging") <NEW_LINE> time.sleep(0.3 * self.global_delay_factor) <NEW_LINE> self.set_base_prompt() <NEW_LINE> self.clear_buffer() <NEW_LINE> <DEDENT> def enable(self, cmd="enable", pattern=r"#", re_flags=re.IGNORECASE): <NEW_LINE> <INDENT> output = "" <NEW_LINE> if not self.check_enable_mode(): <NEW_LINE> <INDENT> self.write_channel(self.normalize_cmd(cmd)) <NEW_LINE> output += self.read_until_prompt_or_pattern( pattern=pattern, re_flags=re_flags ) <NEW_LINE> if not self.check_enable_mode(): <NEW_LINE> <INDENT> msg = ( "Failed to enter enable mode. Please ensure you pass " "the 'secret' argument to ConnectHandler." ) <NEW_LINE> raise ValueError(msg) <NEW_LINE> <DEDENT> <DEDENT> return output <NEW_LINE> <DEDENT> def save_config(self, cmd="save config flash", confirm=False, confirm_response=""): <NEW_LINE> <INDENT> return super(MrvOptiswitchSSH, self).save_config( cmd=cmd, confirm=confirm, confirm_response=confirm_response ) | MRV Communications Driver (OptiSwitch). | 6259904891af0d3eaad3b1bf |
class ArborealLabledWidget(TypesWidget): <NEW_LINE> <INDENT> _properties = TypesWidget._properties.copy() <NEW_LINE> _properties.update({'macro': 'arboreallabled_widget'}) <NEW_LINE> security = ClassSecurityInfo() <NEW_LINE> security.declarePublic('render_own_label') <NEW_LINE> def render_own_label(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> security.declarePublic('process_form') <NEW_LINE> def process_form(self, instance, field, form, empty_marker=None, emptyReturnsMarker=False): <NEW_LINE> <INDENT> lable = form.get(field.getName()+'_lable', empty_marker) <NEW_LINE> text = form.get(field.getName()+'_value', empty_marker) <NEW_LINE> value = (lable, text) <NEW_LINE> if value is empty_marker: <NEW_LINE> <INDENT> return empty_marker <NEW_LINE> <DEDENT> if emptyReturnsMarker and value == (): <NEW_LINE> <INDENT> return empty_marker <NEW_LINE> <DEDENT> return value, {} | " ArborealLabledWidget | 6259904824f1403a9268629a |
class sdist_debian(sdist): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def get_debian_name(): <NEW_LINE> <INDENT> import version <NEW_LINE> name = "%s_%s" % ("PyMca5", version.debianversion) <NEW_LINE> return name <NEW_LINE> <DEDENT> def prune_file_list(self): <NEW_LINE> <INDENT> sdist.prune_file_list(self) <NEW_LINE> to_remove = ["doc/build", "doc/pdf", "doc/html", "pylint", "epydoc"] <NEW_LINE> print("Removing files for debian") <NEW_LINE> for rm in to_remove: <NEW_LINE> <INDENT> self.filelist.exclude_pattern(pattern="*", anchor=False, prefix=rm) <NEW_LINE> <DEDENT> search_root = os.path.dirname(os.path.abspath(__file__)) <NEW_LINE> for root, _, files in os.walk(search_root): <NEW_LINE> <INDENT> for afile in files: <NEW_LINE> <INDENT> if os.path.splitext(afile)[1].lower() == ".pyx": <NEW_LINE> <INDENT> base_file = os.path.join(root, afile)[len(search_root) + 1:-4] <NEW_LINE> self.filelist.exclude_pattern(pattern=base_file + ".c") <NEW_LINE> self.filelist.exclude_pattern(pattern=base_file + ".cpp") <NEW_LINE> self.filelist.exclude_pattern(pattern=base_file + ".html") <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def make_distribution(self): <NEW_LINE> <INDENT> self.prune_file_list() <NEW_LINE> sdist.make_distribution(self) <NEW_LINE> dest = self.archive_files[0] <NEW_LINE> dirname, basename = os.path.split(dest) <NEW_LINE> base, ext = os.path.splitext(basename) <NEW_LINE> while ext in [".zip", ".tar", ".bz2", ".gz", ".Z", ".lz", ".orig"]: <NEW_LINE> <INDENT> base, ext = os.path.splitext(base) <NEW_LINE> <DEDENT> debian_arch = os.path.join(dirname, self.get_debian_name() + ".orig.tar.gz") <NEW_LINE> os.rename(self.archive_files[0], debian_arch) <NEW_LINE> self.archive_files = [debian_arch] <NEW_LINE> print("Building debian .orig.tar.gz in %s" % self.archive_files[0]) | Tailor made sdist for debian
* remove auto-generated doc
* remove cython generated .c files
* remove cython generated .cpp files
* remove .bat files
* include .l man files | 62599048d6c5a102081e34b6 |
class TestPetscApp(unittest.TestCase): <NEW_LINE> <INDENT> def test_run(self): <NEW_LINE> <INDENT> app = TestApp() <NEW_LINE> app.run() <NEW_LINE> return | Test of PetscApplication. | 62599048097d151d1a2c2407 |
class GRRChipsecTest(client_test_lib.EmptyActionTest): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.chipsec_mock = mock.MagicMock() <NEW_LINE> self.chipsec_mock.chipset = mock.MagicMock() <NEW_LINE> self.chipsec_mock.chipset.UnknownChipsetError = MockUnknownChipsetError <NEW_LINE> self.chipsec_mock.hal = mock.MagicMock() <NEW_LINE> self.chipsec_mock.logger = mock.MagicMock() <NEW_LINE> mock_modules = { "chipsec": self.chipsec_mock, "chipsec.hal": self.chipsec_mock.hal, } <NEW_LINE> self.chipsec_patch = mock.patch.dict(sys.modules, mock_modules) <NEW_LINE> self.chipsec_patch.start() <NEW_LINE> from grr.client.components.chipsec_support.actions import grr_chipsec <NEW_LINE> self.grr_chipsec_module = grr_chipsec <NEW_LINE> self.grr_chipsec_module.chipset = self.chipsec_mock.chipset <NEW_LINE> self.grr_chipsec_module.logger = self.chipsec_mock.logger <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> self.chipsec_patch.stop() | Generic test class for GRR-Chipsec actions. | 62599048a79ad1619776b41b |
class Group(CommonModelNameNotUnique): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> app_label = 'main' <NEW_LINE> unique_together = (("name", "inventory"),) <NEW_LINE> <DEDENT> inventory = models.ForeignKey('Inventory', null=False, related_name='groups') <NEW_LINE> parents = models.ManyToManyField('self', symmetrical=False, related_name='children', blank=True) <NEW_LINE> variable_data = models.OneToOneField('VariableData', null=True, default=None, blank=True, on_delete=SET_NULL, related_name='group') <NEW_LINE> hosts = models.ManyToManyField('Host', related_name='groups', blank=True) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def can_user_add(cls, user, data): <NEW_LINE> <INDENT> if not 'inventory' in data: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> inventory = Inventory.objects.get(pk=data['inventory']) <NEW_LINE> return Inventory._has_permission_types(user, inventory, PERMISSION_TYPES_ALLOWING_INVENTORY_WRITE) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def can_user_administrate(cls, user, obj, data): <NEW_LINE> <INDENT> return Inventory._has_permission_types(user, obj.inventory, PERMISSION_TYPES_ALLOWING_INVENTORY_WRITE) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def can_user_read(cls, user, obj): <NEW_LINE> <INDENT> return Inventory.can_user_read(user, obj.inventory) <NEW_LINE> <DEDENT> def get_absolute_url(self): <NEW_LINE> <INDENT> import lib.urls <NEW_LINE> return reverse(lib.urls.views_GroupsDetail, args=(self.pk,)) | A group of managed nodes. May belong to multiple groups | 62599048d7e4931a7ef3d411 |
class XAxisConfig(ConfigSection): <NEW_LINE> <INDENT> available_options = options.X_AXIS_CONFIG <NEW_LINE> defaults = { "title": TitleConfig, } | The 'xAxis' section of a Highcharts config.
See http://www.highcharts.com/ref/#xAxis for available options. | 625990480a366e3fb87ddd81 |
class Get200(Notifications): <NEW_LINE> <INDENT> pass | OK | 6259904896565a6dacd2d957 |
class DALTONGeoOptTest(GenericGeoOptTest): <NEW_LINE> <INDENT> extracoords = 1 <NEW_LINE> def testoptdone(self): <NEW_LINE> <INDENT> self.assertTrue(self.data.optdone) <NEW_LINE> convergence = numpy.abs(self.data.geovalues[-1]) <= self.data.geotargets <NEW_LINE> self.assertTrue(sum(convergence) >= 2) | Customized geometry optimization unittest | 6259904807d97122c421803d
class TestCylinder(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> from sas.models.CylinderModel import CylinderModel <NEW_LINE> from sas.models.DiamCylFunc import DiamCylFunc <NEW_LINE> self.comp = CylinderModel() <NEW_LINE> self.diam = DiamCylFunc() <NEW_LINE> <DEDENT> def test(self): <NEW_LINE> <INDENT> self.comp.setParam("radius", 20) <NEW_LINE> self.comp.setParam("length",400) <NEW_LINE> self.diam.setParam("radius", 20) <NEW_LINE> self.diam.setParam("length",400) <NEW_LINE> self.assertAlmostEqual(self.comp.calculate_ER(), self.diam.run(0.1)/2) | Unit tests for calculate_ER (cylinder model) | 6259904826238365f5fadef8 |
@attr.s <NEW_LINE> class Pomodoro: <NEW_LINE> <INDENT> len_args = attr.ib(factory=dict) <NEW_LINE> num_rounds = attr.ib(default=3) <NEW_LINE> tracker = attr.ib(init=False) <NEW_LINE> def create_rounds(self): <NEW_LINE> <INDENT> rounds = [Round(self.len_args) for i in range(self.num_rounds)] <NEW_LINE> return rounds | main class to store basic pomodoro technique attributes,
such as length of sessions.
The user should be able to set the number of rounds and length
parameter through command line arguments. | 625990487cff6e4e811b6dd5 |
class EmailChange(LoginRequiredMixin, generic.FormView): <NEW_LINE> <INDENT> template_name = 'users/email_change_form.html' <NEW_LINE> form_class = EmailChangeForm <NEW_LINE> def form_valid(self, form): <NEW_LINE> <INDENT> user = self.request.user <NEW_LINE> new_email = form.cleaned_data['email'] <NEW_LINE> current_site = get_current_site(self.request) <NEW_LINE> domain = current_site.domain <NEW_LINE> context = { 'protocol': 'https' if self.request.is_secure() else 'http', 'domain': domain, 'token': dumps(new_email), 'user': user, } <NEW_LINE> subject = render_to_string('users/mail_template/email_change/subject.txt', context) <NEW_LINE> message = render_to_string('users/mail_template/email_change/message.txt', context) <NEW_LINE> send_mail(subject, message, None, [new_email]) <NEW_LINE> return redirect('users:email_change_done') | メールアドレスの変更 | 6259904823e79379d538d89a |
class ManagerSite(AppManager): <NEW_LINE> <INDENT> def __init__(self,connection): <NEW_LINE> <INDENT> super(ManagerSite, self).__init__(MODULE_PATH, connection) <NEW_LINE> bindings = [ { 'observer': self, 'exchange': 'manage', 'key' : 'manager.#' }] <NEW_LINE> self.amqp.bind('manage', bindings) <NEW_LINE> pass <NEW_LINE> <DEDENT> def handle_message(self, message): <NEW_LINE> <INDENT> logger.debug("handle_message %s : %s" % (message.routing_key,message.body)) <NEW_LINE> data = json.loads(message.body) <NEW_LINE> managers.append(data) <NEW_LINE> pass <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self.listener.close() <NEW_LINE> self.channel.close() | amqp manager for the pinger app
sets up an exclusive queue for an instance and binds it to the pinger exchange | 6259904845492302aabfd86e |
class Tree(): <NEW_LINE> <INDENT> def __init__(self, root): <NEW_LINE> <INDENT> self.root = root <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<Tree root={root}>".format(root=self.root) <NEW_LINE> <DEDENT> def find_in_tree(self, data): <NEW_LINE> <INDENT> return self.root.find(data) | Tree. | 6259904891af0d3eaad3b1c1 |
class WorkgroupSubmissionReviewSerializer(serializers.HyperlinkedModelSerializer): <NEW_LINE> <INDENT> submission = serializers.PrimaryKeyRelatedField(queryset=WorkgroupSubmission.objects.all()) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = WorkgroupSubmissionReview <NEW_LINE> fields = ( 'id', 'url', 'created', 'modified', 'question', 'answer', 'submission', 'reviewer', 'content_id' ) | Serializer for model interactions | 6259904863b5f9789fe86509 |
class LossBinary: <NEW_LINE> <INDENT> def __init__(self, jaccard_weight=0): <NEW_LINE> <INDENT> self.nll_loss = nn.BCEWithLogitsLoss() <NEW_LINE> self.jaccard_weight = jaccard_weight <NEW_LINE> <DEDENT> def __call__(self, outputs, targets): <NEW_LINE> <INDENT> loss = (1 - self.jaccard_weight) * self.nll_loss(outputs, targets) <NEW_LINE> if self.jaccard_weight: <NEW_LINE> <INDENT> loss += self.jaccard_weight * (1 - soft_jaccard(outputs, targets)) <NEW_LINE> <DEDENT> return loss | Loss defined as BCE - log(soft_jaccard)
Vladimir Iglovikov, Sergey Mushinskiy, Vladimir Osin,
Satellite Imagery Feature Detection using Deep Convolutional Neural Network: A Kaggle Competition
arXiv:1706.06169 | 62599048004d5f362081f9b5 |
class MatchViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = Match.objects.all() <NEW_LINE> serializer_class = MatchSerializer <NEW_LINE> filter_backends = (SearchFilter, DjangoFilterBackend) <NEW_LINE> search_fields = ('name',) <NEW_LINE> filter_fields = ('query_result', 'query_result__query', 'query_result__round', 'video_clip', 'user_match') | create:
Create a new match instance.
retrieve:
Return the given match.
list:
Return a list of all the existing matches.
update:
Update a given match as whole.
partial_update:
Update a set of parameters of a given match. | 6259904850485f2cf55dc325 |
class Provider(object): <NEW_LINE> <INDENT> def __init__(self, id): <NEW_LINE> <INDENT> self.id = id <NEW_LINE> self.title = RPX_PROVIDERS[self.id] <NEW_LINE> self.icon_tag = u"<img src='/++resource++plonesocial.auth.rpx.icons/%s.png' title='%s' alt='%s'>" % (self.id, self.title, self.title) <NEW_LINE> self.icon_tag_title = u"%s %s" % (self.icon_tag, self.title) | represents a provider | 6259904830dc7b76659a0bd0 |
class DealornotAgent(RNNRolloutAgent): <NEW_LINE> <INDENT> def __init__(self, name, args, sel_args, train=False, diverse=False, max_total_len=100, model_url='https://tatk-data.s3-ap-northeast-1.amazonaws.com/rnnrollout_dealornot.zip'): <NEW_LINE> <INDENT> self.config_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'configs') <NEW_LINE> self.file_url = model_url <NEW_LINE> self.auto_download() <NEW_LINE> if not os.path.exists(self.config_path): <NEW_LINE> <INDENT> os.mkdir(self.config_path) <NEW_LINE> <DEDENT> _model_path = os.path.join(self.config_path, 'models') <NEW_LINE> self.model_path = _model_path <NEW_LINE> if not os.path.exists(_model_path): <NEW_LINE> <INDENT> os.makedirs(_model_path) <NEW_LINE> <DEDENT> self.data_path = os.path.join(get_root_path(), args.data) <NEW_LINE> domain = get_domain(args.domain) <NEW_LINE> corpus = RnnModel.corpus_ty(domain, self.data_path, freq_cutoff=args.unk_threshold, verbose=True, sep_sel=args.sep_sel) <NEW_LINE> model = RnnModel(corpus.word_dict, corpus.item_dict_old, corpus.context_dict, corpus.count_dict, args) <NEW_LINE> state_dict = utils.load_model(os.path.join(self.config_path, args.model_file)) <NEW_LINE> model.load_state_dict(state_dict) <NEW_LINE> sel_model = SelectionModel(corpus.word_dict, corpus.item_dict_old, corpus.context_dict, corpus.count_dict, sel_args) <NEW_LINE> sel_state_dict = utils.load_model(os.path.join(self.config_path, sel_args.selection_model_file)) <NEW_LINE> sel_model.load_state_dict(sel_state_dict) <NEW_LINE> super(DealornotAgent, self).__init__(model, sel_model, args, name, train, diverse, max_total_len) <NEW_LINE> self.vis = args.visual <NEW_LINE> <DEDENT> def auto_download(self): <NEW_LINE> <INDENT> if os.path.exists(os.path.join(self.config_path, 'model/rnn_model_state_dict.th')) and os.path.exists(os.path.join(self.config_path, 'selection_model_state_dict.th')): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> models_dir = os.path.join(self.config_path, 'models') <NEW_LINE> cached_path(self.file_url, models_dir) <NEW_LINE> files = os.listdir(models_dir) <NEW_LINE> target_file = '' <NEW_LINE> for name in files: <NEW_LINE> <INDENT> if name.endswith('.json'): <NEW_LINE> <INDENT> target_file = name[:-5] <NEW_LINE> <DEDENT> <DEDENT> try: <NEW_LINE> <INDENT> assert target_file in files <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print('allennlp download file error: RnnRollout Deal_or_Not data download failed.') <NEW_LINE> raise e <NEW_LINE> <DEDENT> zip_file_path = os.path.join(models_dir, target_file + '.zip') <NEW_LINE> shutil.copyfile(os.path.join(models_dir, target_file), zip_file_path) <NEW_LINE> with zipfile.ZipFile(zip_file_path, 'r') as zip_ref: <NEW_LINE> <INDENT> zip_ref.extractall(models_dir) | The Rnn Rollout model for DealorNot dataset. | 625990481f5feb6acb163f92 |
class HPMTimer(Timer): <NEW_LINE> <INDENT> top_level = 'GPAW.calculator' <NEW_LINE> compatible = ['Initialization','SCF-cycle'] <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> Timer.__init__(self) <NEW_LINE> hpm_start(self.top_level) <NEW_LINE> <DEDENT> def start(self, name): <NEW_LINE> <INDENT> Timer.start(self, name) <NEW_LINE> if name in self.compatible: <NEW_LINE> <INDENT> hpm_start(name) <NEW_LINE> <DEDENT> <DEDENT> def stop(self, name=None): <NEW_LINE> <INDENT> Timer.stop(self, name) <NEW_LINE> if name in self.compatible: <NEW_LINE> <INDENT> hpm_stop(name) <NEW_LINE> <DEDENT> <DEDENT> def write(self, out=sys.stdout): <NEW_LINE> <INDENT> Timer.write(self, out) <NEW_LINE> hpm_stop(self.top_level) | HPMTimer requires installation of the IBM BlueGene/P HPM
middleware interface to the low-level UPC library. This will
most likely only work at ANL's BlueGene/P. Must compile
with GPAW_HPM macro in customize.py. Note that HPM_Init
and HPM_Finalize are called in _gpaw.c and not in the Python
interface. Timer must be called on all ranks in node, otherwise
HPM will hang. Hence, we only call HPM_start/stop on a list
subset of timers. | 625990488e05c05ec3f6f829 |
class PaginatorMixin(object): <NEW_LINE> <INDENT> paginator_class = DiggPaginator <NEW_LINE> paginate_by = 10 <NEW_LINE> paginator_body = 5 <NEW_LINE> paginator_tail = 2 <NEW_LINE> def get_paginator(self, queryset, per_page, orphans=0, allow_empty_first_page=True, body=5, tail=2): <NEW_LINE> <INDENT> return self.paginator_class(queryset, per_page, body=body, tail=tail, orphans=orphans, allow_empty_first_page=allow_empty_first_page) <NEW_LINE> <DEDENT> def paginate_queryset(self, queryset, page_size): <NEW_LINE> <INDENT> paginator = self.get_paginator(queryset, page_size, allow_empty_first_page=self.get_allow_empty(), body=self.paginator_body, tail=self.paginator_tail) <NEW_LINE> page = self.kwargs.get('page') or self.request.GET.get('page') or 1 <NEW_LINE> try: <NEW_LINE> <INDENT> page_number = int(page) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> if page == 'last': <NEW_LINE> <INDENT> page_number = paginator.num_pages <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise Http404("Page is not 'last', nor can it be converted to an int.") <NEW_LINE> <DEDENT> <DEDENT> try: <NEW_LINE> <INDENT> page = paginator.page(page_number) <NEW_LINE> return (paginator, page, page.object_list, page.has_other_pages()) <NEW_LINE> <DEDENT> except InvalidPage: <NEW_LINE> <INDENT> raise Http404('Invalid page (%(page_number)s)' % {'page_number': page_number }) | refer
https://bitbucket.org/dicos/digg-like-paginator/src/8cd1de9da5e5?at=master - this one adapt python34
and
https://djangosnippets.org/snippets/773/
use in ListView, source code in utils | 6259904830c21e258be99ba3 |
class ServiceAccount(_messages.Message): <NEW_LINE> <INDENT> email = _messages.StringField(1) <NEW_LINE> scopes = _messages.StringField(2, repeated=True) | A Compute Engine service account.
Fields:
email: Email address of the service account.
scopes: The list of scopes to be made available for this service account. | 625990486fece00bbacccd54 |
class CognitoResponseType(Enum): <NEW_LINE> <INDENT> AUTHENTICATED = 1 <NEW_LINE> INVALID_CREDENTIALS = 2 <NEW_LINE> TEMP_PWD_USED = 3 <NEW_LINE> PWD_RESET_REQUIRED = 4 <NEW_LINE> UNCONFIRMED = 5 <NEW_LINE> ERROR = 6 <NEW_LINE> CREATED = 7 <NEW_LINE> USER_EXISTS = 8 <NEW_LINE> BAD_PASSWORD = 9 <NEW_LINE> INCOMPLETE_REQUEST = 10 <NEW_LINE> CONFIRMED = 11 <NEW_LINE> INVALID_TOKEN = 12 <NEW_LINE> EXPIRED_TOKEN = 13 <NEW_LINE> FAILED_ATTEMPTS = 14 <NEW_LINE> RESENT = 15 <NEW_LINE> DELIVERY_FAILED = 16 <NEW_LINE> ALIAS_EXISTS = 17 | An enumeration representing different types of responses the Cognito IDP returns upon a particular request. | 625990484e696a045264e7ef |
class ProviderNotFoundError(Exception): <NEW_LINE> <INDENT> pass | The provider information was not found. | 62599048507cdc57c63a613b |
class Transform(object): <NEW_LINE> <INDENT> def __init__(self, w,h,xlow,ylow,xhigh, yhigh): <NEW_LINE> <INDENT> xspan = (xhigh - xlow) <NEW_LINE> yspan = (yhigh - ylow) <NEW_LINE> self.xbase = xlow <NEW_LINE> self.ybase = yhigh <NEW_LINE> self.xscale = xspan/float(w-1) <NEW_LINE> self.yscale = yspan/float(h-1) <NEW_LINE> <DEDENT> def screen(self, x,y): <NEW_LINE> <INDENT> xs = (x - self.xbase) / self.xscale <NEW_LINE> ys = (self.ybase - y) / self.yscale <NEW_LINE> return int(xs + 0.5),int(ys + 0.5) <NEW_LINE> <DEDENT> def world(self, xs,ys): <NEW_LINE> <INDENT> x = xs * self.xscale + self.xbase <NEW_LINE> y = ys * self.yscale + self.ybase <NEW_LINE> return int(x),int(y) | Internal class for 2D coordinate transformations. | 6259904815baa7234946332e |
class Stack(object): <NEW_LINE> <INDENT> def __init__(self, rc): <NEW_LINE> <INDENT> self.regclass = rc <NEW_LINE> <DEDENT> def stack_base_mask(self): <NEW_LINE> <INDENT> return 'StackBaseMask(1)' | An operand that must be in a stack slot.
A `Stack` object can be used to indicate an operand constraint for a value
operand that must live in a stack slot. | 6259904807f4c71912bb07d0 |
class Host(object): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def spawn(name, **kwds): <NEW_LINE> <INDENT> proc = ProcessSpawner(name=name, **kwds) <NEW_LINE> host = proc.client._import('pyacq.core.host').Host(name) <NEW_LINE> return proc, host <NEW_LINE> <DEDENT> def __init__(self, name, poll_procs=False): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.spawners = [] <NEW_LINE> server = RPCServer.get_server() <NEW_LINE> if server is not None: <NEW_LINE> <INDENT> server['host'] = self <NEW_LINE> if poll_procs: <NEW_LINE> <INDENT> self.timer = server.start_timer(self.check_spawners, interval=1.0) <NEW_LINE> <DEDENT> <DEDENT> atexit.register(self.close_all_nodegroups) <NEW_LINE> <DEDENT> def create_nodegroup(self, name, manager=None, qt=True, **kwds): <NEW_LINE> <INDENT> server = RPCServer.get_server() <NEW_LINE> addr = re.sub(r':\d+$', ':*', server.address.decode()) <NEW_LINE> sp = ProcessSpawner(name=name, qt=qt, address=addr, **kwds) <NEW_LINE> logger.info("Process started: %s" % sp) <NEW_LINE> rng = sp.client._import('pyacq.core.nodegroup') <NEW_LINE> sp._nodegroup = rng.NodeGroup(host=self, manager=manager) <NEW_LINE> sp.client['nodegroup'] = sp._nodegroup <NEW_LINE> sp._manager = manager <NEW_LINE> self.spawners.append(sp) <NEW_LINE> return sp._nodegroup <NEW_LINE> <DEDENT> def close_all_nodegroups(self, force=False): <NEW_LINE> <INDENT> for sp in self.spawners: <NEW_LINE> <INDENT> if force: <NEW_LINE> <INDENT> sp.kill() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> sp.stop() <NEW_LINE> <DEDENT> <DEDENT> self.spawners = [] <NEW_LINE> <DEDENT> def check_spawners(self): <NEW_LINE> <INDENT> for sp in self.spawners[:]: <NEW_LINE> <INDENT> rval = sp.poll() <NEW_LINE> if sp.poll() is not None: <NEW_LINE> <INDENT> logger.info("Process exited: %s" % sp) <NEW_LINE> self.spawners.remove(sp) <NEW_LINE> sp._manager.nodegroup_closed(sp._nodegroup, _sync='off') | Host serves as a pre-existing contact point for spawning
new processes on a remote machine.
One Host instance must be running on each machine that will be connected
to by a Manager. The Host is only responsible for creating and destroying
NodeGroups. | 6259904826238365f5fadefa |
class JSONDecoderOrderedDict(AbstractJSONDecoderOrderedDict): <NEW_LINE> <INDENT> def __init__(self, parse_float=None, parse_int=None, parse_constant=None, strict=True): <NEW_LINE> <INDENT> AbstractJSONDecoderOrderedDict.__init__(self, parse_float=parse_float, parse_int=parse_int, parse_constant=parse_constant, strict=strict); | Simple JSON <http://json.org> decoder
Performs the following translations in decoding by default:
+---------------+-------------------+
| JSON | Python |
+===============+===================+
| object | OrderedDict |
+---------------+-------------------+
| array | list |
+---------------+-------------------+
| string | String |
+---------------+-------------------+
| number (int) | int, long |
+---------------+-------------------+
| number (real) | float |
+---------------+-------------------+
| true | True |
+---------------+-------------------+
| false | False |
+---------------+-------------------+
| null | None |
+---------------+-------------------+
It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as
their corresponding ``float`` values, which is outside the JSON spec. | 625990483eb6a72ae038b9f9 |
class BackupError(Exception): <NEW_LINE> <INDENT> pass | Base exception for backup errors. | 625990488a43f66fc4bf3533 |
class CustomIsAuthenticatedOrReadOnly(BasePermission): <NEW_LINE> <INDENT> def has_permission(self, request, view): <NEW_LINE> <INDENT> return bool( request.method in SAFE_METHODS or request.user and request.user.is_staff ) | The request is authenticated as a an admin, or is a read-only request.
This is used to ensure that any user, even anonymous users can read but only the admin/staff can modify it.
Even authenticated, non-staff user cannot edit the data. | 6259904824f1403a9268629c |
class PokerHand(object): <NEW_LINE> <INDENT> RESULT = ["Loss", "Tie", "Win"] <NEW_LINE> def __init__(self, hand): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def convert_hand(self, hand): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def get_highest(self, hand1, hand2): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def compare_with(self, other): <NEW_LINE> <INDENT> pass | Program that compares your poker hand with other hand and returns the better
hand as per standard Texas Hold'em rules.
See also: https://www.codewars.com/kata/5739174624fc28e188000465/train/python
Returns Loss, Tie or Win. | 62599048004d5f362081f9b6 |
class TextDetector: <NEW_LINE> <INDENT> def __init__(self,MAX_HORIZONTAL_GAP=30,MIN_V_OVERLAPS=0.6,MIN_SIZE_SIM=0.6): <NEW_LINE> <INDENT> self.text_proposal_connector=TextProposalConnector(MAX_HORIZONTAL_GAP,MIN_V_OVERLAPS,MIN_SIZE_SIM) <NEW_LINE> <DEDENT> def detect_region(self, text_proposals,scores,size, TEXT_PROPOSALS_MIN_SCORE=0.7, TEXT_PROPOSALS_NMS_THRESH=0.3, TEXT_LINE_NMS_THRESH = 0.3,): <NEW_LINE> <INDENT> keep_inds=np.where(scores>TEXT_PROPOSALS_MIN_SCORE)[0] <NEW_LINE> text_proposals, scores=text_proposals[keep_inds], scores[keep_inds] <NEW_LINE> sorted_indices=np.argsort(scores.ravel())[::-1] <NEW_LINE> text_proposals, scores=text_proposals[sorted_indices], scores[sorted_indices] <NEW_LINE> keep_inds=nms(np.hstack((text_proposals, scores)), TEXT_PROPOSALS_NMS_THRESH,GPU_ID=self.GPU_ID) <NEW_LINE> text_proposals, scores=text_proposals[keep_inds], scores[keep_inds] <NEW_LINE> groups_boxes,groups_scores = self.text_proposal_connector.get_text_region(text_proposals, scores, size) <NEW_LINE> return groups_boxes,groups_scores <NEW_LINE> <DEDENT> def detect(self, text_proposals,scores,size, TEXT_PROPOSALS_MIN_SCORE=0.7, TEXT_PROPOSALS_NMS_THRESH=0.3, TEXT_LINE_NMS_THRESH = 0.3, ): <NEW_LINE> <INDENT> keep_inds=np.where(scores>TEXT_PROPOSALS_MIN_SCORE)[0] <NEW_LINE> text_proposals, scores=text_proposals[keep_inds], scores[keep_inds] <NEW_LINE> sorted_indices=np.argsort(scores.ravel())[::-1] <NEW_LINE> text_proposals, scores=text_proposals[sorted_indices], scores[sorted_indices] <NEW_LINE> if len(text_proposals)>0: <NEW_LINE> <INDENT> keep_inds=nms(np.hstack((text_proposals, scores)), TEXT_PROPOSALS_NMS_THRESH) <NEW_LINE> text_proposals, scores=text_proposals[keep_inds], scores[keep_inds] <NEW_LINE> scores=normalize(scores) <NEW_LINE> text_lines = self.text_proposal_connector.get_text_lines(text_proposals, scores, size) <NEW_LINE> keep_inds = nms(text_lines, TEXT_LINE_NMS_THRESH) <NEW_LINE> text_lines = text_lines[keep_inds] <NEW_LINE> return text_lines <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return [] | Detect text from an image | 62599048711fe17d825e166d |
class CodeSnippet(io.StringIO): <NEW_LINE> <INDENT> def __init__(self, code_string): <NEW_LINE> <INDENT> io.StringIO.__init__(self, textwrap.dedent(code_string)) | A code snippet.
Automatically wraps snippet as a file-like object and handles line wraps. | 6259904876d4e153a661dc45 |
class say_hello_result: <NEW_LINE> <INDENT> thrift_spec = ( (0, TType.STRING, 'success', None, None, ), ) <NEW_LINE> def __init__(self, success=None,): <NEW_LINE> <INDENT> self.success = success <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 0: <NEW_LINE> <INDENT> if ftype == TType.STRING: <NEW_LINE> <INDENT> self.success = iprot.readString(); <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('say_hello_result') <NEW_LINE> if self.success is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('success', TType.STRING, 0) <NEW_LINE> oprot.writeString(self.success) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> value = 17 <NEW_LINE> value = (value * 31) ^ hash(self.success) <NEW_LINE> return value <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- success | 62599048b57a9660fecd2e1b |
class TransactionQuote(BaseModel): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> for (param, value) in kwargs.items(): <NEW_LINE> <INDENT> if param=='outlets': <NEW_LINE> <INDENT> setattr(self, param, OutletDictionary(value)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> setattr(self, param, value) <NEW_LINE> <DEDENT> <DEDENT> setattr(self, 'created_at', dateutil.parser.parse(kwargs.get('created_at'))) <NEW_LINE> setattr(self, 'expires_at', dateutil.parser.parse(kwargs.get('expires_at'))) <NEW_LINE> setattr(self, 'btc_amount', Decimal(self.btc_amount)) <NEW_LINE> setattr(self, 'gross', Decimal(self.gross)) <NEW_LINE> setattr(self, 'rate', Decimal(self.rate)) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "TransactionQuote(btc_amount={btc_amount}, currency={currency}, rate={rate}, created_at={created_at}, expires_at={expires_at}, gross={gross})".format( btc_amount=self.btc_amount, currency=self.currency, rate=self.rate, gross= self.gross, created_at=self.created_at, expires_at=self.expires_at) | A class that represents a Bitso Transaction Quote. | 6259904810dbd63aa1c71f7a |
class Labyrinth: <NEW_LINE> <INDENT> def __init__(self, file): <NEW_LINE> <INDENT> self.file = file <NEW_LINE> <DEDENT> def display(self, window): <NEW_LINE> <INDENT> base = pygame.image.load(const.SPRITESHEET) <NEW_LINE> wall = base.subsurface((120, 100, 20, 20)) <NEW_LINE> way = base.subsurface((20, 0, 20, 20)) <NEW_LINE> start = base.subsurface((160, 20, 20, 20)) <NEW_LINE> finish = base.subsurface((220, 20, 20, 20)) <NEW_LINE> guardian = pygame.image.load(const.GUARDIAN).convert_alpha() <NEW_LINE> guardian = pygame.transform.scale(guardian, (19, 19)) <NEW_LINE> open_file = open(self.file) <NEW_LINE> lignes = open_file.readlines() <NEW_LINE> for i, value in enumerate(lignes): <NEW_LINE> <INDENT> if value == "wall \n": <NEW_LINE> <INDENT> window.blit(wall, (get_position(i))) <NEW_LINE> <DEDENT> elif value == "way \n": <NEW_LINE> <INDENT> window.blit(way, (get_position(i))) <NEW_LINE> <DEDENT> elif value == "start \n": <NEW_LINE> <INDENT> window.blit(start, (get_position(i))) <NEW_LINE> <DEDENT> elif value == "finish \n": <NEW_LINE> <INDENT> window.blit(finish, (get_position(i))) <NEW_LINE> window.blit(guardian, (get_position(i))) <NEW_LINE> <DEDENT> <DEDENT> open_file.close() | Structure's class to display map and
convert index, coordinates | 625990481f5feb6acb163f94 |
class Layer: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.W=[] <NEW_LINE> self.b=[] <NEW_LINE> self.a=[] <NEW_LINE> self.z=[] <NEW_LINE> self.d_W=[] <NEW_LINE> self.d_b=[] <NEW_LINE> self.d_a=[] <NEW_LINE> self.d_z=[] <NEW_LINE> self.feature = "" | A class used to represent a Layer of a Neural Network
...
Attributes
----------
W : list
the incoming weights
b : list
the biases
a : list
the activations
z : list
the outputs
s : list
the gradient of the incoming weights
d_b : list
the gradient of the biases
d_a : list
the gradient of the activations
d_z : list
the gradient of the outputs | 625990480a366e3fb87ddd85 |
class rankings(): <NEW_LINE> <INDENT> def __init__(self, size): <NEW_LINE> <INDENT> self._len = size <NEW_LINE> self._lst = list() <NEW_LINE> <DEDENT> def insert(self, item, value): <NEW_LINE> <INDENT> high = len(self._lst) <NEW_LINE> if (len(self._lst) != 0 and self._lst[0].value < value): <NEW_LINE> <INDENT> high = 0 <NEW_LINE> <DEDENT> low = 0 <NEW_LINE> while high > low + 1: <NEW_LINE> <INDENT> guess = (int) ((high + low) / 2) <NEW_LINE> if self._lst[guess].value < value: <NEW_LINE> <INDENT> high = guess <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> low = guess <NEW_LINE> <DEDENT> <DEDENT> self._lst.insert(high, types.SimpleNamespace( item=item, value=value ) ) <NEW_LINE> if len(self._lst) > self._len: <NEW_LINE> <INDENT> del self._lst[-1] <NEW_LINE> <DEDENT> <DEDENT> def getList(self): <NEW_LINE> <INDENT> return list(x.item for x in self._lst) | Sorted fixed-size array | 6259904823e79379d538d89d |