Columns:
code: string, lengths 4 to 4.48k
docstring: string, lengths 1 to 6.45k
_id: string, length 24
class GameStats: <NEW_LINE> <INDENT> def __init__(self, ai_game): <NEW_LINE> <INDENT> self.settings = ai_game.settings <NEW_LINE> self.reset_stats() <NEW_LINE> self.game_active = False <NEW_LINE> self.high_score = 0 <NEW_LINE> <DEDENT> def reset_stats(self): <NEW_LINE> <INDENT> self.ships_left = self.settings.ship_limit <NEW_LINE> self.score = 0 <NEW_LINE> self.level = 1
Monitoring the game's statistical data.
6259905fd486a94d0ba2d65b
class SavGolFilterDetrender(BaseEstimator, TransformerMixin): <NEW_LINE> <INDENT> def __init__(self, window=201, order=3): <NEW_LINE> <INDENT> self.window = window <NEW_LINE> self.order = order <NEW_LINE> <DEDENT> def fit(self, X, y=None): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def transform(self,X): <NEW_LINE> <INDENT> self.fit_params = {} <NEW_LINE> X_new = X.copy() <NEW_LINE> for col in X.columns: <NEW_LINE> <INDENT> tmp_data = X[col].values.astype(np.double) <NEW_LINE> sgf = savgol_filter(tmp_data, self.window, self.order) <NEW_LINE> self.fit_params[col] = dict(sgf=sgf) <NEW_LINE> X_new[col] = tmp_data - sgf <NEW_LINE> <DEDENT> return X_new
Detrend the calcium signal using a Savitzky-Golay filter Parameters ---------- window : int, optional (default: 201) Number of samples to use to build the Savitzky-Golay filter order : int, optional (default: 3) Order of the Savitzky-Golay filter
6259905f3539df3088ecd92f
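A minimal usage sketch for the detrender above, assuming the SavGolFilterDetrender class plus its dependencies (numpy, pandas, scipy's savgol_filter, scikit-learn's BaseEstimator/TransformerMixin) are importable; the column names and signal shapes are made up for illustration. Note that the Savitzky-Golay window must be odd and no longer than the trace.

import numpy as np
import pandas as pd

t = np.arange(1000, dtype=float)
df = pd.DataFrame({
    'cell_a': 0.01 * t + np.random.randn(1000),          # slow drift plus noise
    'cell_b': np.sin(t / 50.0) + np.random.randn(1000),
})

detrender = SavGolFilterDetrender(window=201, order=3)   # window odd, order < window
detrended = detrender.fit(df).transform(df)              # same shape, trend removed per column
print(detrended.head())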
class UrlTest(Package): <NEW_LINE> <INDENT> homepage = "http://www.url-fetch-example.com" <NEW_LINE> version('test', url='to-be-filled-in-by-test') <NEW_LINE> def install(self, spec, prefix): <NEW_LINE> <INDENT> pass
Mock package that fetches from a URL.
6259905fa8370b77170f1a61
class TestCleanupChainRemoval(AttrCalcTestCase): <NEW_LINE> <INDENT> def testAttribute(self): <NEW_LINE> <INDENT> attr1 = self.ch.attribute(attributeId=1) <NEW_LINE> attr2 = self.ch.attribute(attributeId=2) <NEW_LINE> attr3 = self.ch.attribute(attributeId=3) <NEW_LINE> modifier1 = Modifier() <NEW_LINE> modifier1.state = State.offline <NEW_LINE> modifier1.context = Context.local <NEW_LINE> modifier1.sourceAttributeId = attr1.id <NEW_LINE> modifier1.operator = Operator.postMul <NEW_LINE> modifier1.targetAttributeId = attr2.id <NEW_LINE> modifier1.location = Location.ship <NEW_LINE> modifier1.filterType = None <NEW_LINE> modifier1.filterValue = None <NEW_LINE> effect1 = self.ch.effect(effectId=1, categoryId=EffectCategory.passive) <NEW_LINE> effect1.modifiers = (modifier1,) <NEW_LINE> holder1 = CharacterItem(self.ch.type_(typeId=1, effects=(effect1,), attributes={attr1.id: 5})) <NEW_LINE> modifier2 = Modifier() <NEW_LINE> modifier2.state = State.offline <NEW_LINE> modifier2.context = Context.local <NEW_LINE> modifier2.sourceAttributeId = attr2.id <NEW_LINE> modifier2.operator = Operator.postPercent <NEW_LINE> modifier2.targetAttributeId = attr3.id <NEW_LINE> modifier2.location = Location.ship <NEW_LINE> modifier2.filterType = FilterType.all_ <NEW_LINE> modifier2.filterValue = None <NEW_LINE> effect2 = self.ch.effect(effectId=2, categoryId=EffectCategory.passive) <NEW_LINE> effect2.modifiers = (modifier2,) <NEW_LINE> holder2 = IndependentItem(self.ch.type_(typeId=2, effects=(effect2,), attributes={attr2.id: 7.5})) <NEW_LINE> holder3 = ShipItem(self.ch.type_(typeId=3, attributes={attr3.id: 0.5})) <NEW_LINE> self.fit.items.add(holder1) <NEW_LINE> self.fit.ship = holder2 <NEW_LINE> self.fit.items.add(holder3) <NEW_LINE> self.assertAlmostEqual(holder3.attributes[attr3.id], 0.6875) <NEW_LINE> self.fit.items.remove(holder1) <NEW_LINE> self.assertAlmostEqual(holder3.attributes[attr3.id], 0.5375) <NEW_LINE> self.fit.ship = None <NEW_LINE> self.fit.items.remove(holder3) <NEW_LINE> self.assertEqual(len(self.log), 0) <NEW_LINE> self.assertLinkBuffersEmpty(self.fit)
Check that a removed item damages all attributes that were relying on its attributes.
6259905f442bda511e95d8a3
class DescribeDomainsResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Domains = None <NEW_LINE> self.TotalCount = None <NEW_LINE> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> if params.get("Domains") is not None: <NEW_LINE> <INDENT> self.Domains = [] <NEW_LINE> for item in params.get("Domains"): <NEW_LINE> <INDENT> obj = DomainBriefInfo() <NEW_LINE> obj._deserialize(item) <NEW_LINE> self.Domains.append(obj) <NEW_LINE> <DEDENT> <DEDENT> self.TotalCount = params.get("TotalCount") <NEW_LINE> self.RequestId = params.get("RequestId")
DescribeDomains response structure.
62599060d7e4931a7ef3d6ce
class SWAP(ObservableBase): <NEW_LINE> <INDENT> def __init__(self, A): <NEW_LINE> <INDENT> self.name = "SWAP" <NEW_LINE> self.symbol = "S" <NEW_LINE> self.A = A <NEW_LINE> <DEDENT> def apply(self, nn_state, samples): <NEW_LINE> <INDENT> samples = samples.to(device=nn_state.device) <NEW_LINE> samples1 = samples <NEW_LINE> samples2 = torch.roll(samples1, 1, 0) <NEW_LINE> samples1_, samples2_ = swap(samples1.clone(), samples2.clone(), self.A) <NEW_LINE> weight1 = nn_state.importance_sampling_weight(samples1_, samples1) <NEW_LINE> weight2 = nn_state.importance_sampling_weight(samples2_, samples2) <NEW_LINE> weight = cplx.elementwise_mult(weight1, weight2) <NEW_LINE> return cplx.real(weight)
The :math:`\text{Swap}_A` observable. Can be used to compute the 2nd Renyi entropy of the region A through: :math:`S_2 = -\ln\langle \text{SWAP}_A \rangle` Ref: PhysRevLett.104.157201 :param A: The sites contained in the region A. :type A: int or list or np.array or torch.Tensor
625990601f5feb6acb16427e
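An illustrative calculation (not the library's API) of how the docstring's formula S_2 = -ln⟨SWAP_A⟩ is applied once per-sample SWAP estimates are in hand; the array below is a made-up stand-in for many Monte Carlo samples of the value returned by SWAP.apply.

import numpy as np

swap_estimates = np.array([0.52, 0.49, 0.51, 0.50])  # hypothetical per-sample estimates
swap_expectation = swap_estimates.mean()
renyi_2 = -np.log(swap_expectation)
print(renyi_2)  # ~0.68, close to ln 2 for a maximally mixed single qubit in region A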
class DQNPlayer(Player): <NEW_LINE> <INDENT> def __init__(self, team, config, use_pretrained=True, name='MinMaxPlayer', verbose=True): <NEW_LINE> <INDENT> super(DQNPlayer, self).__init__(team, name, verbose) <NEW_LINE> self.model = TicTacToeDQNModel(config) <NEW_LINE> self.model.load(use_pretrained=use_pretrained) <NEW_LINE> <DEDENT> def move(self, game): <NEW_LINE> <INDENT> board = self.team * np.array(game.board) <NEW_LINE> values = self.model.infer(board) <NEW_LINE> top_choices = sorted([(val, i) for i, val in enumerate(values)], reverse=True) <NEW_LINE> available_moves = utils.row_col_moves_to_idx(game.available_moves()) <NEW_LINE> for val, i in top_choices: <NEW_LINE> <INDENT> if i in available_moves: <NEW_LINE> <INDENT> row, col = utils.idx_to_row_col(i) <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> if self.verbose: <NEW_LINE> <INDENT> print('Predicted choice values:') <NEW_LINE> self.print_values(values) <NEW_LINE> print('{} selects ({}, {})'.format(self.name, row, col)) <NEW_LINE> <DEDENT> return game.move(row, col) <NEW_LINE> <DEDENT> def print_values(self, values): <NEW_LINE> <INDENT> f_string = '\n'.join(['{:+.8f}\t' * 3] * 3) + '\n' <NEW_LINE> print(f_string.format(*values))
Class for a Deep Q-Network player.
625990607d847024c075da67
class RbacUnderPermissionException(BasePatroleException): <NEW_LINE> <INDENT> message = "Authorized action was not allowed to be performed"
Raised when the expected result is a pass but the actual result is a failure.
625990607d847024c075da68
class Estimator(object): <NEW_LINE> <INDENT> dataset = None <NEW_LINE> optimizer = None <NEW_LINE> network = None <NEW_LINE> transformer = None <NEW_LINE> def __init__(self, dataset, optimizer, network, transformer): <NEW_LINE> <INDENT> self.dataset = dataset <NEW_LINE> self.optimizer = optimizer <NEW_LINE> self.network = network <NEW_LINE> self.transformer = transformer <NEW_LINE> self.logger = logging.getLogger(__name__) <NEW_LINE> <DEDENT> def train(self, epochs): <NEW_LINE> <INDENT> for epoch in range(epochs): <NEW_LINE> <INDENT> mini_batches = self.dataset.batches(self.optimizer.batch_size) <NEW_LINE> train_loss = 0 <NEW_LINE> train_acc = 0 <NEW_LINE> for step, batch in enumerate(mini_batches): <NEW_LINE> <INDENT> x, y = self.transformer(*batch) <NEW_LINE> output = self.network.forward(x) <NEW_LINE> accuracy, loss = self.network.loss(output, y) <NEW_LINE> train_acc += accuracy <NEW_LINE> train_loss += loss <NEW_LINE> self.network.backward(output, y) <NEW_LINE> self.optimizer.step() <NEW_LINE> self.optimizer.zero_grad() <NEW_LINE> <DEDENT> train_acc /= step <NEW_LINE> train_loss /= step <NEW_LINE> self.logger.info('TRAIN: Completed epoch {epoch} with accuracy {accuracy}% and loss {loss.data}'.format(epoch=epoch, accuracy=(train_acc * 100), loss=train_loss)) <NEW_LINE> if self.dataset.valid: <NEW_LINE> <INDENT> valid = self.dataset.valid <NEW_LINE> valid_x, valid_y = self.transformer(*valid) <NEW_LINE> valid_acc, valid_loss = self.network.loss(self.network.forward(valid_x), valid_y) <NEW_LINE> <DEDENT> self.logger.info('VALID: Completed epoch for {epoch} with accuracy {accuracy}% and loss {loss.data}'.format(epoch=epoch, accuracy=(valid_acc * 100), loss=valid_loss)) <NEW_LINE> <DEDENT> <DEDENT> def test(self): <NEW_LINE> <INDENT> test = self.dataset.test <NEW_LINE> x, y = self.transformer(*test) <NEW_LINE> output = self.network.forward(x) <NEW_LINE> accuracy, loss = self.network.loss(output, y) <NEW_LINE> self.logger.info('TEST: Model trained with test accuracy {accuracy}% and loss {loss.data}'.format(accuracy=(accuracy * 100), loss=loss))
This is loosely based on the TensorFlow estimator: we'll first implement the MNIST estimator and bring the common functionality here. Now that I think about it, data transformation can be tricky and needs to be thought through thoroughly. But regarding the APIs that we expose, I think we can just take that up from TensorFlow's implementation. 1. takes a function that transforms the input dataset into a format that the network can consume.
625990602ae34c7f260ac77a
class ParamEstimationPSAEM2(Simulator): <NEW_LINE> <INDENT> def maximize(self, param0, num_part, max_iter=1000, tol=0.001, callback=None, callback_sim=None, meas_first=False, filter='cpfas', filter_options=None, smoother='full', smoother_options=None, alpha_gen=alpha_gen, discard_eps=0.0, discard_percentile=0): <NEW_LINE> <INDENT> params_local = numpy.copy(param0) <NEW_LINE> alltrajs = None <NEW_LINE> weights = numpy.empty((max_iter * num_part,)) <NEW_LINE> datalen = 0 <NEW_LINE> for i in range(max_iter): <NEW_LINE> <INDENT> self.set_params(params_local) <NEW_LINE> self.simulate(num_part, 1, filter=filter, filter_options=filter_options, smoother=smoother, smoother_options=smoother_options, meas_first=meas_first) <NEW_LINE> tmp = numpy.copy(self.straj.traj) <NEW_LINE> T = len(tmp) <NEW_LINE> N = tmp[0].pa.part.shape[0] <NEW_LINE> D = tmp[0].pa.part.shape[1] <NEW_LINE> newtrajs = numpy.empty((T, N, D)) <NEW_LINE> for t in range(T): <NEW_LINE> <INDENT> newtrajs[t] = tmp[t].pa.part <NEW_LINE> <DEDENT> w = 1.0 <NEW_LINE> alpha = alpha_gen(i) <NEW_LINE> weights[:datalen] *= (1.0 - alpha) <NEW_LINE> weights[datalen:datalen + 1] = alpha * w <NEW_LINE> filter_options['cond_traj'] = numpy.copy(self.straj.traj) <NEW_LINE> if (callback_sim is not None): <NEW_LINE> <INDENT> callback_sim(self) <NEW_LINE> <DEDENT> if (alltrajs is None): <NEW_LINE> <INDENT> alltrajs = numpy.copy(newtrajs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> alltrajs = numpy.concatenate((alltrajs[:, :datalen], newtrajs), axis=1) <NEW_LINE> <DEDENT> datalen += 1 <NEW_LINE> tmp = numpy.percentile(weights[:datalen], discard_percentile) <NEW_LINE> wlow = numpy.max(numpy.hstack( (weights[:datalen][weights[:datalen] < tmp], 0.0))) <NEW_LINE> threshold = min(discard_eps, wlow) <NEW_LINE> zero_ind = (weights[:datalen] <= threshold) <NEW_LINE> zerolen = numpy.count_nonzero(zero_ind) <NEW_LINE> weights[:datalen - zerolen] = weights[:datalen][~zero_ind] <NEW_LINE> alltrajs[:, :datalen - zerolen] = alltrajs[:, :datalen][:, ~zero_ind] <NEW_LINE> datalen -= zerolen <NEW_LINE> weights[:datalen] /= numpy.sum(weights[:datalen]) <NEW_LINE> params_local = self.model.maximize_weighted(self.straj, alltrajs[:, :datalen], weights[:datalen]) <NEW_LINE> if (callback is not None): <NEW_LINE> <INDENT> callback(params=params_local, Q=-numpy.Inf, cur_iter=i + 1) <NEW_LINE> <DEDENT> <DEDENT> return (params_local, -numpy.Inf)
Extension of the Simulator class to iteratively perform particle smoothing combined with a gradient search algorithm for maximizing the likelihood of the parameter estimates.
6259906091af0d3eaad3b4bc
class GameData: <NEW_LINE> <INDENT> enemies = None <NEW_LINE> obstacles = None <NEW_LINE> towers = None <NEW_LINE> grid = None <NEW_LINE> path = None
Class to hold data in a game without granting unrestricted access to the top-level modelling class directly.
625990604f6381625f199fed
class Rotacionar90X3(bpy.types.Operator): <NEW_LINE> <INDENT> bl_idname = "object.rotate90x3" <NEW_LINE> bl_label = "+{}º x_3".format(R) <NEW_LINE> @classmethod <NEW_LINE> def poll(cls, context): <NEW_LINE> <INDENT> return context.active_object is not None <NEW_LINE> <DEDENT> def execute(self, context): <NEW_LINE> <INDENT> rotate(context, 'x_3', R) <NEW_LINE> return {'FINISHED'}
Operator 'rotate'. Rotates the x_3 group by +90 degrees.
6259906063b5f9789fe86807
class StoppableThread(threading.Thread): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(StoppableThread, self).__init__(*args, **kwargs) <NEW_LINE> self.setDaemon(True) <NEW_LINE> self._stop = threading.Event() <NEW_LINE> <DEDENT> def stop(self): <NEW_LINE> <INDENT> self._stop.set() <NEW_LINE> <DEDENT> def stopped(self): <NEW_LINE> <INDENT> return self._stop.isSet()
Thread class with a stop() method. The thread itself has to check regularly for the stopped() condition.
6259906024f1403a92686418
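A short usage sketch, assuming the StoppableThread class above is importable. As the docstring says, the worker loop has to poll stopped() itself; stop() only raises the flag.

import time

class Worker(StoppableThread):
    def run(self):
        while not self.stopped():
            # do one small unit of work, then re-check the flag
            time.sleep(0.1)

w = Worker()
w.start()
time.sleep(1)
w.stop()   # request shutdown
w.join()   # returns once run() notices the flag and exits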
class elements(object): <NEW_LINE> <INDENT> def __init__(self, cls): <NEW_LINE> <INDENT> self.cls = cls <NEW_LINE> <DEDENT> def __getattr__(self, tag): <NEW_LINE> <INDENT> return self.cls(tag)
This module allows the user to import arbitrary elements by tag name. Example: >>> from untemplate.elements import Html, Head, Body >>> print(Html) <html /> >>> print(Html(Body)) <html><body /></html>
6259906032920d7e50bc76db
class ArcNodeAbstractTestCase(FictionOutlineAbstractTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super().setUp() <NEW_LINE> self.node_to_test = self.arc1.arc_root_node.get_children()[0] <NEW_LINE> self.part1 = self.o1.story_tree_root.add_child( name='Part 1', description='A long time ago in a galaxy far away', story_element_type='part') <NEW_LINE> self.o1_valid_storynode = self.part1.add_child( name='Chapter One', story_element_type='chapter', description='Our story begins') <NEW_LINE> logger.debug('self.o1_valid_storynode is a node of type \'%s\' and has a pk of %s' % (self.o1_valid_storynode.story_element_type, self.o1_valid_storynode.pk)) <NEW_LINE> self.o1_invalid_node = self.o2.story_tree_root.add_child(name='Chapter One', story_element_type='chapter', description='A totally different story begins.')
Adds additional properties to test.
6259906045492302aabfdb6e
class IntentDatabaseHandler(DatabaseMetadataContainerController): <NEW_LINE> <INDENT> def __init__(self) -> None: <NEW_LINE> <INDENT> super().__init__(SQLQueryFactory(table = "intent", fields = { "id": "text", "name": "text", "quality_type": "text", "intent_category": "text", "variant": "text", "definition": "text", "material": "text", "version": "text", "setting_version": "text" })) <NEW_LINE> self._container_type = InstanceContainer
The Database handler for Intent containers
62599060e76e3b2f99fda094
class BaseModel(db1.Model): <NEW_LINE> <INDENT> __abstract__ = True <NEW_LINE> @property <NEW_LINE> def dict(self): <NEW_LINE> <INDENT> return utils.to_dict(self, self.__class__)
Base model class.
625990605166f23b2e244a67
class MVPAVoxelSelector: <NEW_LINE> <INDENT> def __init__(self, data, mask, labels, num_folds, sl ): <NEW_LINE> <INDENT> self.data = data <NEW_LINE> self.mask = mask.astype(np.bool) <NEW_LINE> self.labels = labels <NEW_LINE> self.num_folds = num_folds <NEW_LINE> self.sl = sl <NEW_LINE> num_voxels = np.sum(self.mask) <NEW_LINE> if num_voxels == 0: <NEW_LINE> <INDENT> raise ValueError('Zero processed voxels') <NEW_LINE> <DEDENT> <DEDENT> def run(self, clf): <NEW_LINE> <INDENT> rank = MPI.COMM_WORLD.Get_rank() <NEW_LINE> if rank == 0: <NEW_LINE> <INDENT> logger.info( 'running activity-based voxel selection via Searchlight' ) <NEW_LINE> <DEDENT> self.sl.distribute([self.data], self.mask) <NEW_LINE> self.sl.broadcast((self.labels, self.num_folds, clf)) <NEW_LINE> if rank == 0: <NEW_LINE> <INDENT> logger.info( 'data preparation done' ) <NEW_LINE> <DEDENT> result_volume = self.sl.run_searchlight(_sfn) <NEW_LINE> result_list = result_volume[self.mask] <NEW_LINE> results = [] <NEW_LINE> if rank == 0: <NEW_LINE> <INDENT> for idx, value in enumerate(result_list): <NEW_LINE> <INDENT> if value is None: <NEW_LINE> <INDENT> value = 0 <NEW_LINE> <DEDENT> results.append((idx, value)) <NEW_LINE> <DEDENT> results.sort(key=lambda tup: tup[1], reverse=True) <NEW_LINE> logger.info( 'activity-based voxel selection via Searchlight is done' ) <NEW_LINE> <DEDENT> return result_volume, results
Activity-based voxel selection component of FCMA Parameters ---------- data: 4D array in shape [brain 3D + epoch] contains the averaged and normalized brain data epoch by epoch. It is generated by .io.prepare_searchlight_mvpa_data mask: 3D array labels: 1D array contains the labels of the epochs. It is generated by .io.prepare_searchlight_mvpa_data num_folds: int the number of folds to be conducted in the cross validation sl: Searchlight the distributed Searchlight object
62599060dd821e528d6da4cb
class UserValidate(object): <NEW_LINE> <INDENT> def __init__(self, db): <NEW_LINE> <INDENT> self.db_client = db <NEW_LINE> <DEDENT> def on_get(self, req, resp, token): <NEW_LINE> <INDENT> valid_langs = { 'en_US': 'en_US', 'es_CO': 'es_CO', 'pt_BR': 'pt_BR' } <NEW_LINE> value_lang_cookie = valid_langs.get( req.cookies.get('user_lang')) or 'es_CO' <NEW_LINE> _ = gettext.translation('account_validation', '/code/locale', languages=[value_lang_cookie]).gettext <NEW_LINE> try: <NEW_LINE> <INDENT> user = jwt.decode( token, os.getenv('JWT_SECRET'), verify='True', algorithms=['HS512'], ) <NEW_LINE> self.db_client.users.find_one_and_update( {'email': user.get('email')}, {'$set': {'validated': True}}, ) <NEW_LINE> queue().enqueue('utils.email_new_user_admin_notification.send_email') <NEW_LINE> <DEDENT> except jwt.ExpiredSignatureError as __: <NEW_LINE> <INDENT> resp.status = falcon.HTTP_UNAUTHORIZED <NEW_LINE> resp.body = dumps({'message': [_('JWT token expired')]}) <NEW_LINE> <DEDENT> except jwt.DecodeError as __: <NEW_LINE> <INDENT> resp.status = falcon.HTTP_UNAUTHORIZED <NEW_LINE> resp.body = dumps({'message': [_('JWT decode error')]})
Deal with user validation.
625990600fa83653e46f657c
class GWS_S35_STD_Class: <NEW_LINE> <INDENT> def __init__(self, Pin, Reversal): <NEW_LINE> <INDENT> self.mPin = Pin <NEW_LINE> self.mReversal = Reversal <NEW_LINE> GPIO.setup(self.mPin, GPIO.OUT) <NEW_LINE> self.mPwm = GPIO.PWM(self.mPin , 500) <NEW_LINE> self.Stop() <NEW_LINE> <DEDENT> """停止""" <NEW_LINE> def Stop(self): <NEW_LINE> <INDENT> self.mPwm.start(0) <NEW_LINE> <DEDENT> """速度セット""" <NEW_LINE> def SetSpeed(self, speed): <NEW_LINE> <INDENT> if speed > 100: <NEW_LINE> <INDENT> speed = 100 <NEW_LINE> <DEDENT> elif speed < -100: <NEW_LINE> <INDENT> speed = -100 <NEW_LINE> <DEDENT> elif -MIN_SPEED < speed and speed < MIN_SPEED: <NEW_LINE> <INDENT> self.Stop() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> duty = (self.mReversal*speed+100)/200*30+60 <NEW_LINE> self.mPwm.start(duty) <NEW_LINE> <DEDENT> <DEDENT> """終了処理""" <NEW_LINE> def Cleanup(self): <NEW_LINE> <INDENT> self.Stop() <NEW_LINE> GPIO.setup(self.mPin, GPIO.IN)
Constructor.
625990603d592f4c4edbc571
class SingleDocumentWriter(BaseWriter): <NEW_LINE> <INDENT> def __init__(self, stream: BinaryIO, headers_included: bool=False): <NEW_LINE> <INDENT> self._stream = stream <NEW_LINE> self._headers_included = headers_included <NEW_LINE> <DEDENT> def session(self) -> SingleDocumentWriterSession: <NEW_LINE> <INDENT> return SingleDocumentWriterSession(self._stream, self._headers_included)
Writer that writes all the data into a single file.
62599060d7e4931a7ef3d6cf
class assemblyReferenceInitialRep: <NEW_LINE> <INDENT> def __init__(self): pass <NEW_LINE> def clear(self, rootAssemblyName): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def getInitialRep(self, targetAssemblyName): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def reader(self, rootAssemblyName): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def writer(self, rootAssemblyName): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def className(): pass <NEW_LINE> @staticmethod <NEW_LINE> def enableDebugOutput(value): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> initialRepDictionaries = {} <NEW_LINE> kRepKey = 'rep' <NEW_LINE> kSubKey = 'sub' <NEW_LINE> kWantDebugOutput = False
This utility class is invoked by the sceneAssembly plug-in to manage the save, restore and query of the initial representation information for scene assemblies. An assembly's initial representation is the representation that will be activated when the assembly is first loaded. Each top level scene assembly node will remember the active configuration of its hierarchy at the time it is saved. When the assembly is re-opened, the stored configuration will be used to restore this state. The interface to this class is defined by the following methods: writer(): will create an initialRep definition on a top level assembly. This is based on the current activation state of the assembly hierarchy when the method is called. The scene assembly plug-in will call the writer() method just before file save. reader(): will load in an initialRep definition from a top level assembly. The data loaded will be used by subsequent calls to getInitialRep for the assemblies in its hierarchy. The scene assembly plug-in will invoke the reader() as part of the top level assembly's postLoad routine. getInitialRep(): queries the initialRep data currently available for a given assembly. The routine uses the data that was stored on the associated top level assembly, and loaded in by the reader() method. The scene assembly plug-in will use the initialRep information to determine the initial activation state of the subassembly when it is first loaded. clear(): will clear the initialRep definition for a top level assembly. Subsequent calls to getInitialRep() will return empty values. The scene assembly plug-in will call clear() when the initial representation data for a top level assembly is no longer required (after all assemblies in its hierarchy have finished activating). Internally the initialRep information is stored in a hierarchical python dictionary, which has nested entries corresponding to the assembly hierarchy. The dictionary is persisted using a JSON structure which can be readily mapped to the internal python dictionary structure. The JSON structure is stored as string data on the 'initialRep' attribute on top level assembly nodes.
625990603539df3088ecd932
class Author(models.Model): <NEW_LINE> <INDENT> name = models.CharField(verbose_name=_("Name"), max_length=50) <NEW_LINE> created = models.DateTimeField( verbose_name=_("Created"), editable=False, blank=True, auto_now_add=True ) <NEW_LINE> modified = models.DateTimeField( verbose_name=_("Modified"), editable=False, blank=True, auto_now=True ) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> ordering = ["name"] <NEW_LINE> verbose_name = _("Author") <NEW_LINE> verbose_name_plural = _("Authors") <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name
Author object
62599060498bea3a75a59149
class AvailabeRooms: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.rooms = {key: [] for key in DAYS} <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "%s(%r)" % (self.__class__, self.__dict__) <NEW_LINE> <DEDENT> def add(self, room_resource): <NEW_LINE> <INDENT> self.rooms[room_resource.day].append(room_resource) <NEW_LINE> <DEDENT> def length(self, day): <NEW_LINE> <INDENT> return len(self.rooms[day]) <NEW_LINE> <DEDENT> def unused_rooms(self): <NEW_LINE> <INDENT> total = 0 <NEW_LINE> for day in DAYS: <NEW_LINE> <INDENT> for _ in self.rooms[day]: <NEW_LINE> <INDENT> total += 1 <NEW_LINE> <DEDENT> <DEDENT> return total <NEW_LINE> <DEDENT> def pull_out_resources(self, course): <NEW_LINE> <INDENT> rooms = [] <NEW_LINE> number_of_slots = course.needed_time_slots() <NEW_LINE> day_index = 0 <NEW_LINE> for should_assign_day in course.days: <NEW_LINE> <INDENT> if should_assign_day: <NEW_LINE> <INDENT> avail = self.rooms[DAYS[day_index]] <NEW_LINE> for i in range(len(avail) - number_of_slots): <NEW_LINE> <INDENT> avail_resources = avail[i:number_of_slots] <NEW_LINE> if number_of_slots > len(avail_resources): <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if avail_resources[0].room.capacity > course.num_students: <NEW_LINE> <INDENT> rooms += avail_resources <NEW_LINE> for remove_index in range(i, i+number_of_slots): <NEW_LINE> <INDENT> self.rooms[DAYS[day_index]].pop(remove_index) <NEW_LINE> <DEDENT> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> day_index += 1 <NEW_LINE> <DEDENT> return rooms
List of available room resources
625990608e7ae83300eea723
class Vreddit(Service): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def preprocess(cls, url: str, data: Any) -> str: <NEW_LINE> <INDENT> xpost: Optional[Any] = helpers.get(data, "crosspost_parent_list") <NEW_LINE> fallback_url: str <NEW_LINE> if xpost is not None and len(xpost) > 0: <NEW_LINE> <INDENT> fallback_url = helpers.chained_get( xpost[0], ["secure_media", "reddit_video", "fallback_url"] ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> fallback_url = helpers.chained_get( data, ["media", "reddit_video", "fallback_url"] ) <NEW_LINE> <DEDENT> processed_url: str = ( fallback_url if fallback_url else f"{url}/DASH_1_2_M" ) <NEW_LINE> if requests.head(processed_url).status_code >= 300: <NEW_LINE> <INDENT> processed_url = f"{url}/DASH_1080" <NEW_LINE> <DEDENT> return processed_url <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def postprocess(cls, response) -> Media: <NEW_LINE> <INDENT> media: Media = Media(response.url, ContentType.GIF) <NEW_LINE> if "Content-length" in response.headers: <NEW_LINE> <INDENT> media.size = int(response.headers["Content-length"]) <NEW_LINE> <DEDENT> return media
Service for v.redd.it GIFs.
625990607047854f46340a53
class PygameApp(object): <NEW_LINE> <INDENT> def __init__(self, screensize = (400,400), fullscreen = False, title = 'PygameApp Window'): <NEW_LINE> <INDENT> self.screensize = screensize <NEW_LINE> self.fullscreen = fullscreen <NEW_LINE> self.title = title <NEW_LINE> self.elapsedms = 0 <NEW_LINE> pygame.init() <NEW_LINE> self.clock = pygame.time.Clock() <NEW_LINE> if self.fullscreen: <NEW_LINE> <INDENT> self.displayinfo = pygame.display.Info() <NEW_LINE> self.screensize = (self.displayinfo.current_w, self.displayinfo.current_h) <NEW_LINE> self.display = pygame.display.set_mode(self.screensize, FULLSCREEN) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.display = pygame.display.set_mode(self.screensize) <NEW_LINE> pygame.display.set_caption(self.title) <NEW_LINE> <DEDENT> self.setbackgroundcolor(pygame.Color('black')) <NEW_LINE> <DEDENT> def setbackgroundcolor(self, color): <NEW_LINE> <INDENT> self.backgroundcolor = color <NEW_LINE> self.erase() <NEW_LINE> <DEDENT> def erase(self): <NEW_LINE> <INDENT> self.display.fill(self.backgroundcolor) <NEW_LINE> <DEDENT> def run(self, fps = 50): <NEW_LINE> <INDENT> self.fps = fps <NEW_LINE> running = True <NEW_LINE> while running: <NEW_LINE> <INDENT> for event in pygame.event.get(): <NEW_LINE> <INDENT> if event.type != QUIT: <NEW_LINE> <INDENT> running = self.handle_event(event) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> running = False <NEW_LINE> <DEDENT> <DEDENT> self.elapsedms += self.clock.tick(self.fps) <NEW_LINE> self.poll() <NEW_LINE> pygame.display.update() <NEW_LINE> <DEDENT> self.quit() <NEW_LINE> <DEDENT> def quit(self): <NEW_LINE> <INDENT> pygame.quit() <NEW_LINE> <DEDENT> def handle_event(self, event): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def poll(self): <NEW_LINE> <INDENT> pass
Class that encapsulates a basic pygame application.
62599060cb5e8a47e493ccd0
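A hypothetical subclass showing the intended extension points (handle_event and poll), assuming the PygameApp class above and pygame are importable; the key binding and window size are arbitrary.

from pygame.locals import KEYDOWN, K_ESCAPE

class MyApp(PygameApp):
    def handle_event(self, event):
        # returning False ends the main loop
        if event.type == KEYDOWN and event.key == K_ESCAPE:
            return False
        return True

    def poll(self):
        # called once per frame after events are processed
        self.erase()

if __name__ == '__main__':
    MyApp(screensize=(320, 240), title='Demo').run(fps=30)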
class Execute(BaseAction): <NEW_LINE> <INDENT> def _Run(self, command, success_codes, reboot_codes, restart_retry): <NEW_LINE> <INDENT> result = 0 <NEW_LINE> c = cache.Cache() <NEW_LINE> logging.debug('Interpreting command %s', command) <NEW_LINE> try: <NEW_LINE> <INDENT> command = c.CacheFromLine(command, self._build_info) <NEW_LINE> <DEDENT> except cache.CacheError as e: <NEW_LINE> <INDENT> raise ActionError(e) <NEW_LINE> <DEDENT> logging.info('Executing command %s', command) <NEW_LINE> try: <NEW_LINE> <INDENT> result = subprocess.call(command, shell=True) <NEW_LINE> <DEDENT> except WindowsError as e: <NEW_LINE> <INDENT> raise ActionError('Failed to execute command %s (%s)' % (command, str(e))) <NEW_LINE> <DEDENT> except KeyboardInterrupt: <NEW_LINE> <INDENT> logging.debug('Child received KeyboardInterrupt. Ignoring.') <NEW_LINE> <DEDENT> if result in reboot_codes: <NEW_LINE> <INDENT> raise RestartEvent( 'Restart triggered by exit code %d' % result, 5, retry_on_restart=restart_retry) <NEW_LINE> <DEDENT> elif result not in success_codes: <NEW_LINE> <INDENT> raise ActionError('Command returned invalid exit code %d' % result) <NEW_LINE> <DEDENT> time.sleep(5) <NEW_LINE> <DEDENT> def Run(self): <NEW_LINE> <INDENT> for cmd in self._args: <NEW_LINE> <INDENT> command = cmd[0] <NEW_LINE> success_codes = [0] <NEW_LINE> reboot_codes = [] <NEW_LINE> restart_retry = False <NEW_LINE> if len(cmd) > 1 and cmd[1]: <NEW_LINE> <INDENT> success_codes = cmd[1] <NEW_LINE> <DEDENT> if len(cmd) > 2 and cmd[2]: <NEW_LINE> <INDENT> reboot_codes = cmd[2] <NEW_LINE> <DEDENT> if len(cmd) > 3: <NEW_LINE> <INDENT> restart_retry = cmd[3] <NEW_LINE> <DEDENT> self._Run(command, success_codes, reboot_codes, restart_retry) <NEW_LINE> <DEDENT> <DEDENT> def Validate(self): <NEW_LINE> <INDENT> self._TypeValidator(self._args, list) <NEW_LINE> for cmd_arg in self._args: <NEW_LINE> <INDENT> self._TypeValidator(cmd_arg, list) <NEW_LINE> if not 1 <= len(cmd_arg) <= 4: <NEW_LINE> <INDENT> raise ValidationError('Invalid args length: %s' % cmd_arg) <NEW_LINE> <DEDENT> self._TypeValidator(cmd_arg[0], str) <NEW_LINE> if len(cmd_arg) > 1: <NEW_LINE> <INDENT> self._TypeValidator(cmd_arg[1], list) <NEW_LINE> for arg in cmd_arg[1]: <NEW_LINE> <INDENT> self._TypeValidator(arg, int) <NEW_LINE> <DEDENT> <DEDENT> if len(cmd_arg) > 2: <NEW_LINE> <INDENT> self._TypeValidator(cmd_arg[2], list) <NEW_LINE> for arg in cmd_arg[2]: <NEW_LINE> <INDENT> self._TypeValidator(arg, int) <NEW_LINE> <DEDENT> <DEDENT> if len(cmd_arg) > 3: <NEW_LINE> <INDENT> self._TypeValidator(cmd_arg[3], bool)
Run an executable.
625990609c8ee82313040cd5
class IPCError(Exception): <NEW_LINE> <INDENT> pass
Base exception for all IPC related errors.
6259906091af0d3eaad3b4be
class TF: <NEW_LINE> <INDENT> Array = tf.Tensor <NEW_LINE> Tensor = tf.Tensor <NEW_LINE> Tensor1 = Tensor <NEW_LINE> Tensor2 = Tensor <NEW_LINE> Vector = Tensor2 <NEW_LINE> Covector = Tensor2 <NEW_LINE> Matrix = Tensor2 <NEW_LINE> Tensor3 = Tensor <NEW_LINE> Tensor4 = Tensor <NEW_LINE> Tensor5 = Tensor <NEW_LINE> Tensor6 = Tensor <NEW_LINE> Tensor7 = Tensor <NEW_LINE> Tensor8 = Tensor <NEW_LINE> VectorLike = Union[Numeric, Sequence[Numeric], Array] <NEW_LINE> MatrixLike = Union[VectorLike, Sequence[VectorLike]] <NEW_LINE> CovectorLike = MatrixLike <NEW_LINE> ArrayLike = TensorLike = Union[MatrixLike, Sequence[MatrixLike], Sequence[Sequence[MatrixLike]]] <NEW_LINE> VectorOrMatrix = TypeVar('VectorOrMatrix', Vector, Matrix) <NEW_LINE> Slice = PairOfInts = tf.Tensor <NEW_LINE> @classmethod <NEW_LINE> @property <NEW_LINE> def NaN(cls) -> TF.Tensor: <NEW_LINE> <INDENT> return tf.constant(np.NaN, dtype=FLOAT()) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> @property <NEW_LINE> def NOT_CALCULATED(cls) -> TF.Tensor: <NEW_LINE> <INDENT> return tf.constant('Not Calculated')
Extended tensorflow types.
62599060f7d966606f749404
class EventRegistration(models.Model): <NEW_LINE> <INDENT> event = models.ForeignKey(Event, null=False) <NEW_LINE> feePayable = models.IntegerField() <NEW_LINE> feePaid = models.BooleanField(default=False, null=False) <NEW_LINE> participants = models.ManyToManyField(Candidate) <NEW_LINE> teamName=models.CharField(max_length=40, null=True) <NEW_LINE> timeStamp = models.DateTimeField(default=timezone.now, editable=False) <NEW_LINE> registeredBy = models.ForeignKey(User, limit_choices_to={'groups__name':'registrar'}, null=False) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return str(self.pk)+" "+str(self.teamName)+" "+str(self.event)
Stores the information of an event registration.
625990604a966d76dd5f058a
class UnaryFunction1DVectorViewShape: <NEW_LINE> <INDENT> integration_type: 'IntegrationType' = None <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __init__(self, integration_type: 'IntegrationType'): <NEW_LINE> <INDENT> pass
Base class for unary functions (functors) that work on Interface1D and return a list of ViewShape objects.
62599060a17c0f6771d5d6ef
class StubTestCase(TestCase): <NEW_LINE> <INDENT> bolt_uri = "bolt://localhost:7687" <NEW_LINE> bolt_routing_uri = "bolt+routing://localhost:7687" <NEW_LINE> user = "test" <NEW_LINE> password = "test" <NEW_LINE> auth_token = (user, password)
Base class for test cases that integrate with a server.
62599060dd821e528d6da4cc
class ContrastiveLoss(torch.nn.Module): <NEW_LINE> <INDENT> def __init__(self, margin=2.0): <NEW_LINE> <INDENT> super(ContrastiveLoss, self).__init__() <NEW_LINE> self.margin = margin <NEW_LINE> <DEDENT> def forward(self, output1, output2, label): <NEW_LINE> <INDENT> return torch.mean(self.forward_vector(output1, output2, label)) <NEW_LINE> <DEDENT> def forward_vector(self, output1, output2, label): <NEW_LINE> <INDENT> euclidean_distance = pairwise_distance(output1, output2) <NEW_LINE> loss_contrastive_vec = (1-label) * torch.pow(euclidean_distance, 2) + label * torch.pow(torch.clamp(self.margin - euclidean_distance, min=0.0, max=self.margin), 2) <NEW_LINE> return loss_contrastive_vec
Contrastive loss function. Based on: http://yann.lecun.com/exdb/publis/pdf/hadsell-chopra-lecun-06.pdf
62599060baa26c4b54d50938
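A small usage sketch, assuming the ContrastiveLoss class above is importable and that pairwise_distance comes from torch.nn.functional. Labels follow the convention in forward_vector: 0 marks a similar pair, 1 a dissimilar pair; shapes and batch size are arbitrary.

import torch

criterion = ContrastiveLoss(margin=2.0)
emb1 = torch.randn(16, 128, requires_grad=True)   # embeddings from one siamese branch
emb2 = torch.randn(16, 128, requires_grad=True)   # embeddings from the other branch
labels = torch.randint(0, 2, (16,)).float()       # 0 = similar, 1 = dissimilar

loss = criterion(emb1, emb2, labels)              # scalar: mean over the batch
loss.backward()                                   # gradients flow to both branches
print(loss.item())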
class SSHServerProcess(SSHProcess, SSHServerStreamSession): <NEW_LINE> <INDENT> def __init__(self, process_factory, sftp_factory, allow_scp): <NEW_LINE> <INDENT> SSHProcess.__init__(self) <NEW_LINE> SSHServerStreamSession.__init__(self, self._start_process, sftp_factory, allow_scp) <NEW_LINE> self._process_factory = process_factory <NEW_LINE> <DEDENT> def _start_process(self, stdin, stdout, stderr): <NEW_LINE> <INDENT> self._stdin = stdin <NEW_LINE> self._stdout = stdout <NEW_LINE> self._stderr = stderr <NEW_LINE> return self._process_factory(self) <NEW_LINE> <DEDENT> @property <NEW_LINE> def stdin(self): <NEW_LINE> <INDENT> return self._stdin <NEW_LINE> <DEDENT> @property <NEW_LINE> def stdout(self): <NEW_LINE> <INDENT> return self._stdout <NEW_LINE> <DEDENT> @property <NEW_LINE> def stderr(self): <NEW_LINE> <INDENT> return self._stderr <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def redirect(self, stdin=None, stdout=None, stderr=None, bufsize=io.DEFAULT_BUFFER_SIZE, send_eof=True): <NEW_LINE> <INDENT> if stdin: <NEW_LINE> <INDENT> yield from self._create_writer(stdin, bufsize, send_eof) <NEW_LINE> <DEDENT> if stdout: <NEW_LINE> <INDENT> yield from self._create_reader(stdout, bufsize, send_eof) <NEW_LINE> <DEDENT> if stderr: <NEW_LINE> <INDENT> yield from self._create_reader(stderr, bufsize, send_eof, EXTENDED_DATA_STDERR) <NEW_LINE> <DEDENT> <DEDENT> @asyncio.coroutine <NEW_LINE> def redirect_stdin(self, target, bufsize=io.DEFAULT_BUFFER_SIZE, send_eof=True): <NEW_LINE> <INDENT> yield from self.redirect(target, None, None, bufsize, send_eof) <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def redirect_stdout(self, source, bufsize=io.DEFAULT_BUFFER_SIZE, send_eof=True): <NEW_LINE> <INDENT> yield from self.redirect(None, source, None, bufsize, send_eof) <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def redirect_stderr(self, source, bufsize=io.DEFAULT_BUFFER_SIZE, send_eof=True): <NEW_LINE> <INDENT> yield from self.redirect(None, None, source, bufsize, send_eof) <NEW_LINE> <DEDENT> def get_environment(self): <NEW_LINE> <INDENT> return self.env <NEW_LINE> <DEDENT> def get_command(self): <NEW_LINE> <INDENT> return self.command <NEW_LINE> <DEDENT> def get_subsystem(self): <NEW_LINE> <INDENT> return self.subsystem <NEW_LINE> <DEDENT> def get_terminal_type(self): <NEW_LINE> <INDENT> return self._chan.get_terminal_type() <NEW_LINE> <DEDENT> def get_terminal_size(self): <NEW_LINE> <INDENT> return self._chan.get_terminal_size() <NEW_LINE> <DEDENT> def get_terminal_mode(self, mode): <NEW_LINE> <INDENT> return self._chan.get_terminal_mode(mode) <NEW_LINE> <DEDENT> def exit(self, status): <NEW_LINE> <INDENT> self._chan.exit(status) <NEW_LINE> <DEDENT> def exit_with_signal(self, signal, core_dumped=False, msg='', lang=DEFAULT_LANG): <NEW_LINE> <INDENT> return self._chan.exit_with_signal(signal, core_dumped, msg, lang)
SSH server process handler
625990603d592f4c4edbc573
class UserAgentFilterRule(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.RuleType = None <NEW_LINE> self.RulePaths = None <NEW_LINE> self.UserAgents = None <NEW_LINE> self.FilterType = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.RuleType = params.get("RuleType") <NEW_LINE> self.RulePaths = params.get("RulePaths") <NEW_LINE> self.UserAgents = params.get("UserAgents") <NEW_LINE> self.FilterType = params.get("FilterType") <NEW_LINE> memeber_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in memeber_set: <NEW_LINE> <INDENT> memeber_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(memeber_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fileds are useless." % ",".join(memeber_set))
UserAgent blacklist/whitelist rule configuration.
625990602ae34c7f260ac77d
class TextCaptchaService: <NEW_LINE> <INDENT> def __init__(self, apikey): <NEW_LINE> <INDENT> self._browser = Browser.Browser() <NEW_LINE> self._apikey = apikey <NEW_LINE> <DEDENT> def captcha(self): <NEW_LINE> <INDENT> response = self._browser.open('http://textcaptcha.com/api/' + self._apikey, set_referer=False).read() <NEW_LINE> question = '' <NEW_LINE> answers = [] <NEW_LINE> if len(response) == 0: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> document = xml.dom.minidom.parseString(response) <NEW_LINE> question = document.getElementsByTagName('question')[0].firstChild.data <NEW_LINE> nodes = document.getElementsByTagName('answer') <NEW_LINE> if len(nodes) == 0: <NEW_LINE> <INDENT> raise RuntimeError('Unable to parse answer(s) from textCAPTCHA') <NEW_LINE> <DEDENT> for i in range(len(nodes)): <NEW_LINE> <INDENT> answers.append(nodes[i].firstChild.data) <NEW_LINE> <DEDENT> return (question, answers)
Methods to request a new textCAPTCHA.
62599060009cb60464d02bcd
class EventSubscription: <NEW_LINE> <INDENT> def __init__(self, event_type, filters=None): <NEW_LINE> <INDENT> self.event_type = event_type <NEW_LINE> if filters: <NEW_LINE> <INDENT> self.filters = filters <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.filters = [] <NEW_LINE> <DEDENT> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if self.event_type != other.event_type: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> for f in self.filters: <NEW_LINE> <INDENT> if f not in other.filters: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> def __contains__(self, event): <NEW_LINE> <INDENT> if event.event_type == self.event_type: <NEW_LINE> <INDENT> for sub_filter in self.filters: <NEW_LINE> <INDENT> if event not in sub_filter: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> return False
Represents a subscription to events. An event is part of a subscription if its type matches the type of the subscription and, if any filters are included in the subscription, it passes all filters.
62599060460517430c432b9e
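An illustrative sketch with made-up Event and filter types; per the class above, a filter only needs to support the in operator (__contains__), and an event matches the subscription when its type matches and every filter accepts it.

class Event:
    def __init__(self, event_type, source):
        self.event_type = event_type
        self.source = source

class SourceFilter:
    def __init__(self, source):
        self.source = source
    def __contains__(self, event):
        return event.source == self.source

sub = EventSubscription('door_opened', filters=[SourceFilter('front')])
print(Event('door_opened', 'front') in sub)    # True: type matches, all filters pass
print(Event('door_opened', 'back') in sub)     # False: a filter rejects it
print(Event('window_opened', 'front') in sub)  # False: wrong event type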
class QryLinkManField(Base): <NEW_LINE> <INDENT> _fields_ = [ ('BrokerID', ctypes.c_char * 11), ('InvestorID', ctypes.c_char * 13), ] <NEW_LINE> def __init__(self, BrokerID='', InvestorID=''): <NEW_LINE> <INDENT> super(QryLinkManField, self).__init__() <NEW_LINE> self.BrokerID = self._to_bytes(BrokerID) <NEW_LINE> self.InvestorID = self._to_bytes(InvestorID)
Query contact person.
6259906016aa5153ce401b74
class SubscribeResponse(Response): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(SubscribeResponse, self).__init__(*args, **kwargs) <NEW_LINE> self.original_callback = self.callback <NEW_LINE> self.callback = self.handle_message <NEW_LINE> self.channels = 0 <NEW_LINE> <DEDENT> def handle_message(self, message): <NEW_LINE> <INDENT> message_type, message_channel, message_arg = message <NEW_LINE> if message_type == "subscribe": <NEW_LINE> <INDENT> self.channels += 1 <NEW_LINE> <DEDENT> elif message_type == "unsubscribe": <NEW_LINE> <INDENT> self.channels -= 1 <NEW_LINE> <DEDENT> self.original_callback(message) <NEW_LINE> if self.channels > 0: <NEW_LINE> <INDENT> self.callback = self.handle_message <NEW_LINE> self.stream.read_bytes(1, self.handle_response)
Handles the long-running subscription connections
6259906099cbb53fe6832578
class MonitorRecord(object): <NEW_LINE> <INDENT> def __init__(self, domid, timestamp, buf, moniLen, moniType, domClock): <NEW_LINE> <INDENT> self.domid = domid <NEW_LINE> self.timestamp = timestamp <NEW_LINE> self.buf = buf <NEW_LINE> self.moniLen = moniLen <NEW_LINE> self.moniType = moniType <NEW_LINE> self.domClock = domClock <NEW_LINE> <DEDENT> def getDOMClock(self): <NEW_LINE> <INDENT> return self.domClock
Generic monitor record type - supports the common base information contained in all monitor records.
6259906067a9b606de5475ed
class Reaction(Node): <NEW_LINE> <INDENT> def __init__(self, formula, uid=None, type=None, reversibility='Unknown'): <NEW_LINE> <INDENT> Node.__init__(self) <NEW_LINE> self.formula = formula <NEW_LINE> self.uid = uid <NEW_LINE> self.type = type <NEW_LINE> self.reversibility = reversibility
An object of class Reaction. It inherits all methods from the Node class.
625990603617ad0b5ee077e5
class Thunk: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.base_url = "http://thunk.us/" <NEW_LINE> self.poke_states = ["good", "bad", "iffy", "unknown"] <NEW_LINE> <DEDENT> def create(self, name=None): <NEW_LINE> <INDENT> values = {} <NEW_LINE> if name is not None: <NEW_LINE> <INDENT> values["name"] = name <NEW_LINE> <DEDENT> data = self._query(self.base_url, values) <NEW_LINE> return data <NEW_LINE> <DEDENT> def poke(self, uid, state, payload=None): <NEW_LINE> <INDENT> if state not in self.poke_states: <NEW_LINE> <INDENT> raise PokeStateError(("Invalid poke state %s given" % state)) <NEW_LINE> <DEDENT> url = self.base_url + uid + "/" + state <NEW_LINE> values = {} <NEW_LINE> if payload is not None: <NEW_LINE> <INDENT> values["payload"] = payload <NEW_LINE> <DEDENT> return self._query(url, values) <NEW_LINE> <DEDENT> def check(self, uid): <NEW_LINE> <INDENT> if isinstance(uid, list): <NEW_LINE> <INDENT> extension = ",".join(["%s" % (ids) for ids in uid]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> extension = uid <NEW_LINE> <DEDENT> url = self.base_url + extension <NEW_LINE> return self._query(url) <NEW_LINE> <DEDENT> def _query(self, url, data = None): <NEW_LINE> <INDENT> if data is not None: <NEW_LINE> <INDENT> values = urllib.urlencode(data) <NEW_LINE> request = urllib2.Request(url, values) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> request = urllib2.Request(url) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> response = urllib2.urlopen(request) <NEW_LINE> <DEDENT> except IOError: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> json_data = response.read() <NEW_LINE> data = json.loads(json_data) <NEW_LINE> return data
Class for creating an object which can talk to the thunk.us API.
62599060004d5f362081fb3b
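A hypothetical usage sketch (Python 2 style, since the client relies on urllib/urllib2/json internally), assuming the Thunk class and its PokeStateError exception are importable; the uid strings are placeholders, not real thunk.us identifiers.

client = Thunk()

created = client.create(name='nightly-build')    # decoded JSON response, or None on IOError
print(created)
client.poke('example-uid', 'good', payload='build 1234 passed')  # state must be one of poke_states
print(client.check('example-uid'))                 # single uid
print(client.check(['example-uid', 'other-uid']))  # list form joins uids with commas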
class ShowFabricMulticastIpSaAdRouteSchema(MetaParser): <NEW_LINE> <INDENT> schema ={ "multicast": { "vrf": { Any(): { "vnid": str, Optional("address_family"): { Any(): { "sa_ad_routes": { "gaddr": { Any(): { "grp_len": int, "saddr": { Any(): { "src_len": int, "uptime": str, Optional("interested_fabric_nodes"): { Any(): { "uptime": str, } } } } } } } } } } } } }
Schema for: show fabric multicast ipv4 sa-ad-route show fabric multicast ipv4 sa-ad-route vrf <vrf> show fabric multicast ipv4 sa-ad-route vrf all
625990601f5feb6acb164282
class Question(models.Model): <NEW_LINE> <INDENT> title = models.CharField(max_length=255, verbose_name=_('Title')) <NEW_LINE> content = models.TextField(verbose_name=_('Content')) <NEW_LINE> questioner = models.ForeignKey(User, verbose_name=_('Questioner')) <NEW_LINE> vote_up = models.IntegerField(verbose_name=_('Vote Up'), default=0) <NEW_LINE> vote_down = models.IntegerField(verbose_name=_('Vote Down'), default=0) <NEW_LINE> create_time = models.DateTimeField(verbose_name=_('Create Time'), auto_now_add=True) <NEW_LINE> tags = models.ManyToManyField(Tag, verbose_name=_('Tags')) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.title <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.__unicode__() <NEW_LINE> <DEDENT> class Meta(object): <NEW_LINE> <INDENT> app_label = 'athena' <NEW_LINE> verbose_name = _('Question') <NEW_LINE> verbose_name_plural = _('Questions')
Question model
625990607d847024c075da6c
class ImplementationViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = Implementation.objects.all() <NEW_LINE> serializer_class = ImplementationSerializer
API endpoint that allows implementations to be viewed or edited.
6259906007f4c71912bb0ad5
class HTTPBasicAuthFromNetrc(HTTPBasicAuth): <NEW_LINE> <INDENT> def __init__(self, url): <NEW_LINE> <INDENT> auth = _get_netrc_auth(url) <NEW_LINE> if not auth: <NEW_LINE> <INDENT> raise ValueError("netrc missing or no credentials found in netrc") <NEW_LINE> <DEDENT> username, password = auth <NEW_LINE> super(HTTPBasicAuthFromNetrc, self).__init__(username, password)
HTTP Basic Auth with netrc credentials.
6259906029b78933be26ac10
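A short usage sketch, assuming the class above and the requests library; credentials are looked up in ~/.netrc for the URL's host, so nothing secret appears in the code. The URL is a placeholder.

import requests

url = 'https://api.example.com/resource'
try:
    auth = HTTPBasicAuthFromNetrc(url)
except ValueError:
    auth = None   # no netrc entry: fall back to anonymous access

response = requests.get(url, auth=auth)
print(response.status_code)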
class GetCommentResultSet(ResultSet): <NEW_LINE> <INDENT> def getJSONFromString(self, str): <NEW_LINE> <INDENT> return json.loads(str) <NEW_LINE> <DEDENT> def get_Response(self): <NEW_LINE> <INDENT> return self._output.get('Response', None) <NEW_LINE> <DEDENT> def get_Limit(self): <NEW_LINE> <INDENT> return self._output.get('Limit', None) <NEW_LINE> <DEDENT> def get_Remaining(self): <NEW_LINE> <INDENT> return self._output.get('Remaining', None)
A ResultSet with methods tailored to the values returned by the GetComment Choreo. The ResultSet object is used to retrieve the results of a Choreo execution.
62599060b7558d5895464a79
class ftrl_proximal(object): <NEW_LINE> <INDENT> def __init__(self, alpha, beta, L1, L2, D, interaction=False): <NEW_LINE> <INDENT> self.alpha = alpha <NEW_LINE> self.beta = beta <NEW_LINE> self.L1 = L1 <NEW_LINE> self.L2 = L2 <NEW_LINE> self.D = D <NEW_LINE> self.interaction = interaction <NEW_LINE> self.n = [0.] * D <NEW_LINE> self.z = [0.] * D <NEW_LINE> self.w = {} <NEW_LINE> <DEDENT> def _indices(self, x): <NEW_LINE> <INDENT> yield 0 <NEW_LINE> for index in x: <NEW_LINE> <INDENT> yield index <NEW_LINE> <DEDENT> if self.interaction: <NEW_LINE> <INDENT> D = self.D <NEW_LINE> L = len(x) <NEW_LINE> x = sorted(x) <NEW_LINE> for i in xrange(L): <NEW_LINE> <INDENT> for j in xrange(i+1, L): <NEW_LINE> <INDENT> yield abs(hash(str(x[i]) + '_' + str(x[j]))) % D <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def predict_proba(self, x): return self.predict(x) <NEW_LINE> def predict(self, x): <NEW_LINE> <INDENT> alpha = self.alpha <NEW_LINE> beta = self.beta <NEW_LINE> L1 = self.L1 <NEW_LINE> L2 = self.L2 <NEW_LINE> n = self.n <NEW_LINE> z = self.z <NEW_LINE> w = {} <NEW_LINE> wTx = 0. <NEW_LINE> for i in self._indices(x): <NEW_LINE> <INDENT> sign = -1. if z[i] < 0 else 1. <NEW_LINE> if sign * z[i] <= L1: <NEW_LINE> <INDENT> w[i] = 0. <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> w[i] = (sign * L1 - z[i]) / ((beta + sqrt(n[i])) / alpha + L2) <NEW_LINE> <DEDENT> wTx += w[i] <NEW_LINE> <DEDENT> self.w = w <NEW_LINE> return 1. / (1. + exp(-max(min(wTx, 35.), -35.))) <NEW_LINE> <DEDENT> def update(self, x, p, y): <NEW_LINE> <INDENT> alpha = self.alpha <NEW_LINE> n = self.n <NEW_LINE> z = self.z <NEW_LINE> w = self.w <NEW_LINE> g = p - y <NEW_LINE> for i in self._indices(x): <NEW_LINE> <INDENT> sigma = (sqrt(n[i] + g * g) - sqrt(n[i])) / alpha <NEW_LINE> z[i] += g - sigma * w[i] <NEW_LINE> n[i] += g * g
Our main algorithm: Follow The Regularized Leader - Proximal (FTRL-Proximal). In short, this is an adaptive-learning-rate sparse logistic regression with efficient L1 and L2 regularization.
62599060462c4b4f79dbd09d
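A minimal online-learning sketch, assuming the ftrl_proximal class above together with its own dependencies (sqrt and exp from math; xrange, so Python 2 if interactions are enabled). Features are hashed into D buckets and each example is just a list of indices; the raw records are invented for illustration.

D = 2 ** 20
learner = ftrl_proximal(alpha=0.1, beta=1.0, L1=1.0, L2=1.0, D=D, interaction=False)

stream = [
    ({'country': 'US', 'device': 'mobile'}, 1),
    ({'country': 'DE', 'device': 'desktop'}, 0),
]
for raw, y in stream:
    x = [abs(hash(k + '_' + str(v))) % D for k, v in raw.items()]  # hashed feature indices
    p = learner.predict(x)     # predicted probability of the positive class
    learner.update(x, p, y)    # one FTRL-proximal step on this example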
class Number(_Controller): <NEW_LINE> <INDENT> _TEMPLATE = 'number.jsx' <NEW_LINE> _COMPONENT = 'AntNumber' <NEW_LINE> _PACKAGE = None <NEW_LINE> _ATTRS = ('start={{{start}}} ' 'min={{{minimum}}} ' 'max={{{maximum}}} ' 'step={{{step}}} ' "size={{'{size}'}}") <NEW_LINE> def __init__(self, start: int = 0, minimum: float = -1e100, maximum: float = 1e100, step: int = 1, size: str = 'default') -> None: <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self._comp = self._tag.format( start=start, minimum=minimum, maximum=maximum, step=step, size=size ) <NEW_LINE> <DEDENT> def on_change(self): <NEW_LINE> <INDENT> return self.get <NEW_LINE> <DEDENT> def get(self, data): <NEW_LINE> <INDENT> return data
A number input widget with increment and decrement buttons.
6259906097e22403b383c5a5
class Client(object): <NEW_LINE> <INDENT> def __init__(self, host='http://localhost/v1', api_key=None): <NEW_LINE> <INDENT> self._params = {'api_key': api_key} if api_key else {} <NEW_LINE> self._host = host <NEW_LINE> <DEDENT> def start(self, config: WatcherConfig = None): <NEW_LINE> <INDENT> data = config.as_dict() if config else {} <NEW_LINE> endpoint = '%s/start' % self._host <NEW_LINE> response = requests.post(endpoint, params=self._params, json=data) <NEW_LINE> if response.status_code == 200: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif response.status_code == 401: <NEW_LINE> <INDENT> raise Unauthorized("You have no access to the endpoint") <NEW_LINE> <DEDENT> elif response.status_code == 400: <NEW_LINE> <INDENT> print(response.json()) <NEW_LINE> raise ServiceException( 'Failed to start: %s ' % response.json()['reason']) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise Exception(response.content) <NEW_LINE> <DEDENT> <DEDENT> def stop(self): <NEW_LINE> <INDENT> endpoint = '%s/stop' % self._host <NEW_LINE> response = requests.post(endpoint, params=self._params) <NEW_LINE> if response.status_code == 200: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif response.status_code == 401: <NEW_LINE> <INDENT> raise Unauthorized("You have no access to the endpoint") <NEW_LINE> <DEDENT> elif response.status_code == 400: <NEW_LINE> <INDENT> raise ServiceException( 'Failed to stop: %s ' % response.json()['reason']) <NEW_LINE> <DEDENT> <DEDENT> def status(self) -> WatcherStatus: <NEW_LINE> <INDENT> endpoint = '%s/status' % self._host <NEW_LINE> response = requests.get(endpoint, params=self._params) <NEW_LINE> if response.status_code == 200: <NEW_LINE> <INDENT> return WatcherStatus().update_from_response(response.json()) <NEW_LINE> <DEDENT> elif response.status_code == 401: <NEW_LINE> <INDENT> raise Unauthorized("You have no access to the endpoint") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise Exception(response.content) <NEW_LINE> <DEDENT> <DEDENT> def markets(self): <NEW_LINE> <INDENT> endpoint = '%s/markets' % self._host <NEW_LINE> response = requests.get(endpoint, params=self._params) <NEW_LINE> if response.status_code == 200: <NEW_LINE> <INDENT> return response.json() <NEW_LINE> <DEDENT> elif response.status_code == 401: <NEW_LINE> <INDENT> raise Unauthorized("You have no access to the endpoint") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise Exception(response.content)
Provides the set of endpoints to manage the arbitrage watcher service.
62599060d6c5a102081e37bc
class Query(ObjectType): <NEW_LINE> <INDENT> me = Field(UserType, description=_('user information')) <NEW_LINE> check_token = Field( Boolean, uid=String(), token=String() ) <NEW_LINE> @login_required <NEW_LINE> def resolve_me(self, info): <NEW_LINE> <INDENT> return info.context.user <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def resolve_check_token(self, info, **kwargs): <NEW_LINE> <INDENT> uid = kwargs.get('uid') <NEW_LINE> token = kwargs.get('token') <NEW_LINE> uid = force_text(urlsafe_base64_decode(uid)) <NEW_LINE> try: <NEW_LINE> <INDENT> user = User.objects.get(pk=uid) <NEW_LINE> if account_activation_token.check_token(user, token): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> except User.DoesNotExist: <NEW_LINE> <INDENT> return False
GraphQL queries.
625990600c0af96317c578ab
class MaxPooling2D(_Pooling2D): <NEW_LINE> <INDENT> def __init__(self, pool_size=(2, 2), strides=None, border_mode='valid', dim_ordering=K.image_dim_ordering(), **kwargs): <NEW_LINE> <INDENT> super(MaxPooling2D, self).__init__(pool_size, strides, border_mode, dim_ordering, **kwargs) <NEW_LINE> <DEDENT> def _pooling_function(self, inputs, pool_size, strides, border_mode, dim_ordering): <NEW_LINE> <INDENT> output = K.pool2d(inputs, pool_size, strides, border_mode, dim_ordering, pool_mode='max') <NEW_LINE> return output
Max pooling operation for spatial data. # Arguments pool_size: tuple of 2 integers, factors by which to downscale (vertical, horizontal). (2, 2) will halve the image in each dimension. strides: tuple of 2 integers, or None. Strides values. If None, it will default to `pool_size`. border_mode: 'valid' or 'same'. Note: 'same' will only work with TensorFlow for the time being. dim_ordering: 'th' or 'tf'. In 'th' mode, the channels dimension (the depth) is at index 1, in 'tf' mode is it at index 3. It defaults to the `image_dim_ordering` value found in your Keras config file at `~/.keras/keras.json`. If you never set it, then it will be "th". # Input shape 4D tensor with shape: `(samples, channels, rows, cols)` if dim_ordering='th' or 4D tensor with shape: `(samples, rows, cols, channels)` if dim_ordering='tf'. # Output shape 4D tensor with shape: `(nb_samples, channels, pooled_rows, pooled_cols)` if dim_ordering='th' or 4D tensor with shape: `(samples, pooled_rows, pooled_cols, channels)` if dim_ordering='tf'.
625990604a966d76dd5f058c
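A small shape sketch for the old Keras 1.x API documented above, assuming a Keras 1.x installation where this class is exposed as keras.layers.MaxPooling2D; the input shape is arbitrary and 'th' (channels-first) ordering is used to match the 4D shapes in the docstring.

from keras.models import Sequential
from keras.layers import MaxPooling2D

model = Sequential()
# input (samples, 3, 32, 32) -> output (samples, 3, 16, 16) with pool_size (2, 2)
model.add(MaxPooling2D(pool_size=(2, 2), input_shape=(3, 32, 32), dim_ordering='th'))
print(model.output_shape)   # (None, 3, 16, 16)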
class CI(object): <NEW_LINE> <INDENT> SUCCESS = 'S' <NEW_LINE> FAILURE = 'F' <NEW_LINE> ERROR = 'E' <NEW_LINE> WARNING = 'W' <NEW_LINE> _jenkins = None <NEW_LINE> url = None <NEW_LINE> token = None <NEW_LINE> def __init__(self, url=None, token=None, load=True): <NEW_LINE> <INDENT> self.url = url or C.get('ci.url') <NEW_LINE> self.token = token or C.get('ci.token') <NEW_LINE> if load: <NEW_LINE> <INDENT> self.load() <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def jenkins(self): <NEW_LINE> <INDENT> return self._jenkins <NEW_LINE> <DEDENT> def load(self): <NEW_LINE> <INDENT> logger = logging.getLogger('jenkinsapi.job') <NEW_LINE> logger.setLevel(logging.WARNING) <NEW_LINE> logger = logging.getLogger('jenkinsapi.build') <NEW_LINE> logger.setLevel(logging.WARNING) <NEW_LINE> self._jenkins = jenkins.Jenkins(self.url, requester=CrumbRequester(baseurl=self.url)) <NEW_LINE> <DEDENT> def precheckRemoteBranch(self, remote, branch, integrateto, issue=None): <NEW_LINE> <INDENT> params = { 'remote': remote, 'branch': branch, 'integrateto': integrateto } <NEW_LINE> if issue: <NEW_LINE> <INDENT> params['issue'] = issue <NEW_LINE> <DEDENT> job = self.jenkins.get_job('Precheck remote branch') <NEW_LINE> try: <NEW_LINE> <INDENT> invoke = job.invoke(build_params=params, securitytoken=self.token, delay=5, block=True) <NEW_LINE> <DEDENT> except TimeOut: <NEW_LINE> <INDENT> raise CIException('The build has been in queue for too long. Aborting, please refer to: %s' % job.baseurl) <NEW_LINE> <DEDENT> except JenkinsAPIException: <NEW_LINE> <INDENT> raise CIException('Failed to invoke the build, check your permissions.') <NEW_LINE> <DEDENT> build = invoke.get_build() <NEW_LINE> logging.info('Waiting for the build to complete, please wait...') <NEW_LINE> build.block_until_complete(3) <NEW_LINE> outcome = CI.SUCCESS <NEW_LINE> infos = {'url': build.baseurl} <NEW_LINE> if build.is_good(): <NEW_LINE> <INDENT> logging.debug('Build complete, checking precheck results...') <NEW_LINE> output = build.get_console() <NEW_LINE> result = self.parseSmurfResult(output) <NEW_LINE> if not result: <NEW_LINE> <INDENT> outcome = CI.FAILURE <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> outcome = result['smurf']['result'] <NEW_LINE> infos = dict(infos.items() + result.items()) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> outcome = CI.FAILURE <NEW_LINE> <DEDENT> return (outcome, infos) <NEW_LINE> <DEDENT> def parseSmurfResult(self, output): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for line in output.splitlines(): <NEW_LINE> <INDENT> if not line.startswith('SMURFRESULT'): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> line = line.replace('SMURFRESULT: ', '') <NEW_LINE> (smurf, rest) = line.split(':') <NEW_LINE> elements = [smurf] <NEW_LINE> elements.extend(rest.split(';')) <NEW_LINE> for element in elements: <NEW_LINE> <INDENT> data = element.split(',') <NEW_LINE> errors = int(data[2]) <NEW_LINE> warnings = int(data[3]) <NEW_LINE> if errors > 0: <NEW_LINE> <INDENT> outcome = CI.ERROR <NEW_LINE> <DEDENT> elif warnings > 0: <NEW_LINE> <INDENT> outcome = CI.WARNING <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> outcome = CI.SUCCESS <NEW_LINE> <DEDENT> result[data[0]] = { 'errors': errors, 'warnings': warnings, 'result': outcome } <NEW_LINE> <DEDENT> break <NEW_LINE> <DEDENT> return result
Wrapper for Jenkins
625990603eb6a72ae038bcf8
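A hedged usage sketch for the CI wrapper above; the URL, token and branch names are invented, and the surrounding project's config lookups (C.get) are assumed to be in place:

    # Hypothetical values; precheckRemoteBranch blocks until the Jenkins build finishes
    ci = CI(url='https://ci.example.org', token='secret-token')
    outcome, infos = ci.precheckRemoteBranch(
        remote='https://github.com/someuser/moodle.git',
        branch='MDL-12345-master',
        integrateto='master',
        issue='MDL-12345')
    if outcome == CI.SUCCESS:
        print('Precheck passed: %s' % infos['url'])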
class StateMatchTest(integration.ModuleCase): <NEW_LINE> <INDENT> def test_issue_2167_exsel_no_AttributeError(self): <NEW_LINE> <INDENT> ret = self.run_function('state.top', ['issue-2167-exsel-match.sls']) <NEW_LINE> self.assertNotIn( "AttributeError: 'Matcher' object has no attribute 'functions'", ret ) <NEW_LINE> <DEDENT> def test_issue_2167_ipcidr_no_AttributeError(self): <NEW_LINE> <INDENT> subnets = self.run_function('network.subnets') <NEW_LINE> top_filename = 'issue-2167-ipcidr-match.sls' <NEW_LINE> top_file = os.path.join(STATE_DIR, top_filename) <NEW_LINE> try: <NEW_LINE> <INDENT> open(top_file, 'w').write( 'base:\n' ' {0}:\n' ' - match: ipcidr\n' ' - test\n'.format(subnets[0]) ) <NEW_LINE> ret = self.run_function('state.top', [top_filename]) <NEW_LINE> self.assertNotIn( "AttributeError: 'Matcher' object has no attribute 'functions'", ret ) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> os.remove(top_file)
Validate the file state
62599060a8370b77170f1a67
class DirectoryAuthority(Directory): <NEW_LINE> <INDENT> def __init__(self, address = None, or_port = None, dir_port = None, fingerprint = None, nickname = None, v3ident = None, is_bandwidth_authority = False): <NEW_LINE> <INDENT> super(DirectoryAuthority, self).__init__(address, or_port, dir_port, fingerprint) <NEW_LINE> self.nickname = nickname <NEW_LINE> self.v3ident = v3ident <NEW_LINE> self.is_bandwidth_authority = is_bandwidth_authority <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return _hash_attr(self, 'nickname', 'v3ident', 'is_bandwidth_authority', parent = Directory) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return hash(self) == hash(other) if isinstance(other, DirectoryAuthority) else False <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
Tor directory authority, a special type of relay `hardcoded into tor <https://gitweb.torproject.org/tor.git/tree/src/or/config.c#n819>`_ that enumerates the other relays within the network. At a very high level tor works as follows... 1. A volunteer starts up a new tor relay, during which it sends a `server descriptor <server_descriptor.html>`_ to each of the directory authorities. 2. Each hour the directory authorities make a `vote <networkstatus.html>`_ that says who they think the active relays are in the network and some attributes about them. 3. The directory authorities send each other their votes, and compile that into the `consensus <networkstatus.html>`_. This document is very similar to the votes, the only difference being that the majority of the authorities agree upon and sign this document. The individual relay entries in the vote or consensus are called `router status entries <router_status_entry.html>`_. 4. Tor clients (people using the service) download the consensus from one of the authorities or a mirror to determine the active relays within the network. They in turn use this to construct their circuits and use the network. .. versionchanged:: 1.3.0 Added the is_bandwidth_authority attribute. :var str nickname: nickname of the authority :var str v3ident: identity key fingerprint used to sign votes and consensus :var bool is_bandwidth_authority: **True** if this is a bandwidth authority, **False** otherwise
6259906056ac1b37e6303833
class NoImageTestCase(KartonMixin, TrackedTestCase): <NEW_LINE> <INDENT> def _verify_help_message(self): <NEW_LINE> <INDENT> self.assertIn('usage: karton ', self.current_text) <NEW_LINE> self.assertIn('positional arguments:', self.current_text) <NEW_LINE> self.assertIn('optional arguments:', self.current_text) <NEW_LINE> <DEDENT> def _verify_too_few_arguments(self): <NEW_LINE> <INDENT> self.assertIn('too few arguments; try "karton', self.current_text) <NEW_LINE> <DEDENT> @KartonMixin.run_and_spawn <NEW_LINE> def test_no_args(self, run_or_spawn): <NEW_LINE> <INDENT> run_or_spawn([], ignore_fail=True) <NEW_LINE> self._verify_too_few_arguments() <NEW_LINE> <DEDENT> @KartonMixin.run_and_spawn <NEW_LINE> def test_help(self, run_or_spawn): <NEW_LINE> <INDENT> run_or_spawn(['help']) <NEW_LINE> self.assert_exit_success() <NEW_LINE> self._verify_help_message() <NEW_LINE> <DEDENT> def test_incomplete_commands(self): <NEW_LINE> <INDENT> for cmd in ('run', 'shell', 'start', 'stop', 'status', 'build', 'image', 'image create', 'image import'): <NEW_LINE> <INDENT> self.spawn_karton(cmd.split(' '), ignore_fail=True) <NEW_LINE> self._verify_too_few_arguments() <NEW_LINE> self.assert_exit_fail() <NEW_LINE> <DEDENT> <DEDENT> @KartonMixin.run_and_spawn <NEW_LINE> def test_alias_query(self, run_or_spawn): <NEW_LINE> <INDENT> run_or_spawn(['alias']) <NEW_LINE> self.assert_exit_success() <NEW_LINE> self.assertEqual('', self.current_text) <NEW_LINE> run_or_spawn(['alias', 'invalid'], ignore_fail=True) <NEW_LINE> self.assert_exit_fail() <NEW_LINE> self.assertIn('"invalid" is not a known alias.', self.current_text) <NEW_LINE> run_or_spawn(['alias', 'invalid', 'not-an-image'], ignore_fail=True) <NEW_LINE> self.assert_exit_fail() <NEW_LINE> self.assertIn('Cannot add alias "invalid" for image "not-an-image" as the image does ' 'not exist.', self.current_text) <NEW_LINE> <DEDENT> @KartonMixin.run_and_spawn <NEW_LINE> def test_run_with_invalid_image(self, run_or_spawn): <NEW_LINE> <INDENT> for cmd in ('run', 'shell', 'start', 'stop', 'status', 'build'): <NEW_LINE> <INDENT> run_or_spawn([cmd, 'not-an-image'], ignore_fail=True) <NEW_LINE> self.assert_exit_fail() <NEW_LINE> self.assertIn('The image "not-an-image" doesn\'t exist.', self.current_text) <NEW_LINE> <DEDENT> <DEDENT> @KartonMixin.run_and_spawn <NEW_LINE> def test_no_images(self, run_or_spawn): <NEW_LINE> <INDENT> run_or_spawn(['image', 'list']) <NEW_LINE> self.assert_exit_success() <NEW_LINE> self.assertIn('No images configured.', self.current_text)
Test commands which don't require images to exist or Docker to be invoked.
625990603cc13d1c6d466ddb
class SAWarning(RuntimeWarning): <NEW_LINE> <INDENT> pass
Issued at runtime.
625990603617ad0b5ee077e7
class EmailAuth: <NEW_LINE> <INDENT> def authenticate(self, username=None, password=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> user = User.objects.get(email=username) <NEW_LINE> if user.check_password(password): <NEW_LINE> <INDENT> return user <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> except User.DoesNotExist: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def get_user(self, user_id): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> user = User.objects.get(pk=user_id) <NEW_LINE> if user.is_valid(): <NEW_LINE> <INDENT> return user <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> except User.DoesNotExist: <NEW_LINE> <INDENT> return None
Authenticate a user by an exact match on the email and password
625990607d847024c075da6e
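For Django to use the backend above, it has to be listed in AUTHENTICATION_BACKENDS; the dotted path below is an assumption and depends on where the class actually lives:

    # settings.py (module path is hypothetical)
    AUTHENTICATION_BACKENDS = [
        'myproject.backends.EmailAuth',               # email/password backend above
        'django.contrib.auth.backends.ModelBackend',  # keep the default as a fallback
    ]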
class FacebookNotFoundError(Exception): <NEW_LINE> <INDENT> pass
Data expected from Facebook API response not found.
625990602ae34c7f260ac780
class ComplexType(Type): <NEW_LINE> <INDENT> def __init__(self, name, elt): <NEW_LINE> <INDENT> Type.__init__(self, name) <NEW_LINE> self.is_container = True <NEW_LINE> self.elt = elt <NEW_LINE> self.fields = [] <NEW_LINE> self.nmemb = 1 <NEW_LINE> self.size = 0 <NEW_LINE> self.lenfield_parent = [self] <NEW_LINE> self.fds = [] <NEW_LINE> <DEDENT> def resolve(self, module): <NEW_LINE> <INDENT> if self.resolved: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> pads = 0 <NEW_LINE> enum = None <NEW_LINE> for child in list(self.elt): <NEW_LINE> <INDENT> if child.tag == 'pad': <NEW_LINE> <INDENT> field_name = 'pad' + str(pads) <NEW_LINE> fkey = 'CARD8' <NEW_LINE> type = PadType(child) <NEW_LINE> pads = pads + 1 <NEW_LINE> visible = False <NEW_LINE> <DEDENT> elif child.tag == 'field': <NEW_LINE> <INDENT> field_name = child.get('name') <NEW_LINE> enum = child.get('enum') <NEW_LINE> fkey = child.get('type') <NEW_LINE> type = module.get_type(fkey) <NEW_LINE> visible = True <NEW_LINE> <DEDENT> elif child.tag == 'exprfield': <NEW_LINE> <INDENT> field_name = child.get('name') <NEW_LINE> fkey = child.get('type') <NEW_LINE> type = ExprType(child, module.get_type(fkey), *self.lenfield_parent) <NEW_LINE> visible = False <NEW_LINE> <DEDENT> elif child.tag == 'list': <NEW_LINE> <INDENT> field_name = child.get('name') <NEW_LINE> fkey = child.get('type') <NEW_LINE> type = ListType(child, module.get_type(fkey), *self.lenfield_parent) <NEW_LINE> visible = True <NEW_LINE> <DEDENT> elif child.tag == 'valueparam': <NEW_LINE> <INDENT> field_name = child.get('value-list-name') <NEW_LINE> fkey = 'CARD32' <NEW_LINE> type = ListType(child, module.get_type(fkey), *self.lenfield_parent) <NEW_LINE> visible = True <NEW_LINE> <DEDENT> elif child.tag == 'switch': <NEW_LINE> <INDENT> field_name = child.get('name') <NEW_LINE> field_type = self.name + (field_name,) <NEW_LINE> type = SwitchType(field_type, child, *self.lenfield_parent) <NEW_LINE> visible = True <NEW_LINE> type.make_member_of(module, self, field_type, field_name, visible, True, False) <NEW_LINE> type.resolve(module) <NEW_LINE> continue <NEW_LINE> <DEDENT> elif child.tag == 'fd': <NEW_LINE> <INDENT> fd_name = child.get('name') <NEW_LINE> type = module.get_type('INT32') <NEW_LINE> type.make_fd_of(module, self, fd_name) <NEW_LINE> continue <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> field_type = module.get_type_name(fkey) <NEW_LINE> type.make_member_of(module, self, field_type, field_name, visible, True, False, enum) <NEW_LINE> type.resolve(module) <NEW_LINE> <DEDENT> self.calc_size() <NEW_LINE> self.resolved = True <NEW_LINE> <DEDENT> def calc_size(self): <NEW_LINE> <INDENT> self.size = 0 <NEW_LINE> for m in self.fields: <NEW_LINE> <INDENT> if not m.wire: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if m.type.fixed_size(): <NEW_LINE> <INDENT> self.size = self.size + (m.type.size * m.type.nmemb) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.size = None <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def fixed_size(self): <NEW_LINE> <INDENT> for m in self.fields: <NEW_LINE> <INDENT> if not m.type.fixed_size(): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True
Derived class which represents a structure. Base type for all structure types. Public fields added: fields is an array of Field objects describing the structure fields.
62599060e64d504609df9f1b
class EnvReader(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._setting_dict = {} <NEW_LINE> <DEDENT> def _normalize(self, key): <NEW_LINE> <INDENT> return key.strip() <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> value = self._setting_dict.get(key, None) <NEW_LINE> if not value: <NEW_LINE> <INDENT> value = os.getenv(key, None) <NEW_LINE> self._setting_dict.__setitem__(key, value) <NEW_LINE> <DEDENT> return value <NEW_LINE> <DEDENT> def get(self, key, default=None): <NEW_LINE> <INDENT> value = self.__getitem__(key) <NEW_LINE> if value is None: <NEW_LINE> <INDENT> value = default <NEW_LINE> <DEDENT> return value <NEW_LINE> <DEDENT> def get_int(self, key, default=None): <NEW_LINE> <INDENT> value = self.__getitem__(key) <NEW_LINE> if value is not None: <NEW_LINE> <INDENT> return int(value) <NEW_LINE> <DEDENT> return default
Get settings from the environment. Usage: r = EnvReader() r.get('SETTING_VAR_NAME')
6259906066673b3332c31a96
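A small usage sketch for EnvReader; the variable names are made up for the example:

    import os

    os.environ['APP_PORT'] = '8080'           # hypothetical variable
    reader = EnvReader()
    port = reader.get_int('APP_PORT', 80)     # -> 8080, parsed as int and cached
    debug = reader.get('APP_DEBUG', 'false')  # -> 'false', the default, since unset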
class Word2VecFile(models.Model): <NEW_LINE> <INDENT> model = models.FileField() <NEW_LINE> syn1neg = models.FileField() <NEW_LINE> vectors = models.FileField() <NEW_LINE> def save(self, request): <NEW_LINE> <INDENT> for file in request.FILES.values(): <NEW_LINE> <INDENT> handle_uploaded_file(file)
Word2Vec model. 3 files: the model, syn1neg.npy, vectors.npy
6259906091f36d47f22319dc
class EventListener(object): <NEW_LINE> <INDENT> def __init__(self, schedule): <NEW_LINE> <INDENT> self.my_schedule = schedule <NEW_LINE> <DEDENT> def schedule(self, hass): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def execute(self, hass): <NEW_LINE> <INDENT> pass
The base EventListener class that the schedule uses.
62599060d99f1b3c44d06d3d
class HelperMethods: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def decode_array_of_byte_strings(array): <NEW_LINE> <INDENT> output = [] <NEW_LINE> for byte_str in array: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> output.append(byte_str.decode()) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> return output <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def remove_fp_key_candidates(key_candidates, blocklist): <NEW_LINE> <INDENT> reduced_key_candidates = [] <NEW_LINE> for key_candidate in key_candidates: <NEW_LINE> <INDENT> if key_candidate not in blocklist: <NEW_LINE> <INDENT> reduced_key_candidates.append(key_candidate) <NEW_LINE> <DEDENT> <DEDENT> return reduced_key_candidates
Static methods that can be reused by other classes
62599060462c4b4f79dbd09f
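Both helpers above are plain static methods; a quick sketch of what they return:

    decoded = HelperMethods.decode_array_of_byte_strings([b'alpha', b'beta'])
    # -> ['alpha', 'beta']; byte strings that fail to decode are silently skipped

    candidates = HelperMethods.remove_fp_key_candidates(
        key_candidates=['key1', 'key2', 'key3'],
        blocklist=['key2'])
    # -> ['key1', 'key3']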
class ProductType(TimeStampedModel): <NEW_LINE> <INDENT> name = models.CharField(max_length=100) <NEW_LINE> slug = models.SlugField(unique=True) <NEW_LINE> objects = ProductTypeManager() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> ordering = ('name',) <NEW_LINE> verbose_name = 'Product type' <NEW_LINE> verbose_name_plural = 'Product types' <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '{}'.format(self.name)
Type of product e.g. course or membership.
625990600fa83653e46f6582
class GefAlias(gdb.Command): <NEW_LINE> <INDENT> def __init__(self, alias: str, command: str, completer_class: int = gdb.COMPLETE_NONE, command_class: int = gdb.COMMAND_NONE) -> None: <NEW_LINE> <INDENT> p = command.split() <NEW_LINE> if not p: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if any(x for x in gef.session.aliases if x._alias == alias): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self._command = command <NEW_LINE> self._alias = alias <NEW_LINE> c = command.split()[0] <NEW_LINE> r = self.lookup_command(c) <NEW_LINE> self.__doc__ = f"Alias for '{Color.greenify(command)}'" <NEW_LINE> if r is not None: <NEW_LINE> <INDENT> _instance = r[2] <NEW_LINE> self.__doc__ += f": {_instance.__doc__}" <NEW_LINE> if hasattr(_instance, "complete"): <NEW_LINE> <INDENT> self.complete = _instance.complete <NEW_LINE> <DEDENT> <DEDENT> super().__init__(alias, command_class, completer_class=completer_class) <NEW_LINE> gef.session.aliases.append(self) <NEW_LINE> return <NEW_LINE> <DEDENT> def invoke(self, args: Any, from_tty: bool) -> None: <NEW_LINE> <INDENT> gdb.execute(f"{self._command} {args}", from_tty=from_tty) <NEW_LINE> return <NEW_LINE> <DEDENT> def lookup_command(self, cmd: str) -> Optional[Tuple[str, Type, Any]]: <NEW_LINE> <INDENT> global gef <NEW_LINE> for _name, _class, _instance in gef.gdb.loaded_commands: <NEW_LINE> <INDENT> if cmd == _name: <NEW_LINE> <INDENT> return _name, _class, _instance <NEW_LINE> <DEDENT> <DEDENT> return None
Simple aliasing wrapper because GDB doesn't do what it should.
62599060460517430c432ba0
class SANM(object): <NEW_LINE> <INDENT> def __init__(self, predicted): <NEW_LINE> <INDENT> if predicted.any() < 0: <NEW_LINE> <INDENT> raise ValueError("Predicted traffic matrix must be non-negative") <NEW_LINE> <DEDENT> self.predicted = np.matrix(predicted) <NEW_LINE> self.row_sums = self.predicted.sum(axis=1) <NEW_LINE> self.col_sums = self.predicted.sum(axis=0) <NEW_LINE> <DEDENT> def generate(self, beta, tol=1e-3): <NEW_LINE> <INDENT> tm_size = self.predicted.shape <NEW_LINE> tm_generated = np.zeros(tm_size) <NEW_LINE> for i in range(tm_generated.shape[0]): <NEW_LINE> <INDENT> for j in range(tm_generated.shape[1]): <NEW_LINE> <INDENT> tm_generated[i, j] = (np.sqrt(self.predicted[i, j]) + beta*gauss(0, 1))**2 <NEW_LINE> <DEDENT> <DEDENT> IPF().run(tm_generated, self.row_sums, self.col_sums, tol=tol) <NEW_LINE> return tm_generated
The Spherically Additive Noise Model (SANM) generates a purely spatial traffic matrix around a predicted traffic matrix. Requires Iterative Proportional Fitting (IPF) from ipf.py.
625990602c8b7c6e89bd4e8a
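A usage sketch for SANM, assuming the companion ipf.py module (providing the IPF class used in generate()) and the original file's module-level imports (numpy, random.gauss) are available:

    import numpy as np

    # Hypothetical 3x3 predicted traffic matrix
    predicted = np.array([[10., 2., 3.],
                          [4., 12., 1.],
                          [2., 5., 9.]])

    model = SANM(predicted)
    tm = model.generate(beta=0.5)  # noisy matrix, refitted to the predicted row/column sums
    print(tm.round(2))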
class _TransmitterCache: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.cache = [] <NEW_LINE> <DEDENT> def get_paths_to(self, directory, filename): <NEW_LINE> <INDENT> paths = [] <NEW_LINE> for dirpath, _, filenames in os.walk(directory): <NEW_LINE> <INDENT> for f in filenames: <NEW_LINE> <INDENT> if (f == filename): <NEW_LINE> <INDENT> paths.append(os.path.abspath(os.path.join(dirpath, f))) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return paths <NEW_LINE> <DEDENT> def load_driver_package(self, module, n): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> driver_package = None <NEW_LINE> try: <NEW_LINE> <INDENT> driver_package_path = ( module.driver_package_path ) <NEW_LINE> driver_loader = ( importlib.machinery.SourceFileLoader( "driver_package_" + str(n), driver_package_path ) ) <NEW_LINE> driver_package = driver_loader.load_module() <NEW_LINE> return driver_package <NEW_LINE> <DEDENT> except AttributeError as e: <NEW_LINE> <INDENT> driver_package_name = module.driver_package_name <NEW_LINE> driver_package = __import__(driver_package_name) <NEW_LINE> return driver_package <NEW_LINE> <DEDENT> <DEDENT> except (AttributeError, FileNotFoundError) as e: <NEW_LINE> <INDENT> logger.error("Failed to import driver package: " + str(e)) <NEW_LINE> return None <NEW_LINE> <DEDENT> <DEDENT> def build(self, directory): <NEW_LINE> <INDENT> paths = self.get_paths_to(directory, transmitter_file_name) <NEW_LINE> n = 0 <NEW_LINE> for p in paths: <NEW_LINE> <INDENT> module_name = "transmitter_file_" + str(n) <NEW_LINE> if module_name in sys.modules: <NEW_LINE> <INDENT> del sys.modules[module_name] <NEW_LINE> <DEDENT> n += 1 <NEW_LINE> loader = importlib.machinery.SourceFileLoader(module_name, p) <NEW_LINE> try: <NEW_LINE> <INDENT> module = loader.load_module() <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logger.error( "The transmitter file in the path "+p+" contained "+ "the following error: " + str(e) ) <NEW_LINE> continue <NEW_LINE> <DEDENT> driver_package = self.load_driver_package(module, n) <NEW_LINE> try: <NEW_LINE> <INDENT> self.cache.append( _TransmitterIdent( module.idVendor, module.idProduct, module.manufacturer, module.product, module.channel_units, driver_package.open_method, driver_package.read_channel_method, driver_package.close_method ) ) <NEW_LINE> <DEDENT> except AttributeError as e: <NEW_LINE> <INDENT> logger.error( "The transmitter file in the path " + p + " contained the following error: " + str(e) ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def find_by_vid_pid(self, vid, pid): <NEW_LINE> <INDENT> for i in self.cache: <NEW_LINE> <INDENT> if i.matches(vid, pid): return i
Cache of transmitters from the transmitter package, based on the transmitter_file_name modules inside, which contain the manufacturer, product, and the product and vendor IDs of each transmitter within the system
62599060498bea3a75a5914c
class IOffer(model.Schema): <NEW_LINE> <INDENT> file = NamedBlobFile( title=_(u'Offer'), description=_(u'Please upload an offer'), required=False, )
Offer Content Type
6259906099cbb53fe683257d
class PeriodCfg(A10BaseClass): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.ERROR_MSG = "" <NEW_LINE> self.b_key = "period-cfg" <NEW_LINE> self.DeviceProxy = "" <NEW_LINE> self.week = "" <NEW_LINE> self.A10WW_all = "" <NEW_LINE> self.period = "" <NEW_LINE> self.month = "" <NEW_LINE> self.date = "" <NEW_LINE> self.day = "" <NEW_LINE> for keys, value in kwargs.items(): <NEW_LINE> <INDENT> setattr(self,keys, value)
This class does not support CRUD Operations please use parent. :param week: {"default": 0, "type": "number", "description": "Most recent week", "format": "flag"} :param all: {"default": 0, "type": "number", "description": "all log", "format": "flag"} :param period: {"default": 0, "type": "number", "description": "Specify backup period", "format": "flag"} :param month: {"default": 0, "type": "number", "description": " Most recent day", "format": "flag"} :param date: {"description": "specify number of days", "minimum": 1, "type": "number", "maximum": 31, "format": "number"} :param day: {"default": 0, "type": "number", "description": "Most recent day", "format": "flag"} :param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
625990602ae34c7f260ac782
class KeyView(View): <NEW_LINE> <INDENT> exception = IndexError <NEW_LINE> def __init__(self, view, key): <NEW_LINE> <INDENT> setattribute(self, 'view', view) <NEW_LINE> setattribute(self, 'key', key) <NEW_LINE> setattribute(self, 'fetch', lambda: getitem(view, key)) <NEW_LINE> <DEDENT> def nonex(self): <NEW_LINE> <INDENT> view = getattribute(self, 'view') <NEW_LINE> key = getattribute(self, 'key') <NEW_LINE> return ("key", key, view)
Class for views of parameters derived by indexing.
625990604e4d562566373aa3
class HMACAuth(requests.auth.AuthBase): <NEW_LINE> <INDENT> def __init__(self, username, secret, message): <NEW_LINE> <INDENT> self.username = username <NEW_LINE> self.secret = secret <NEW_LINE> self.message = message <NEW_LINE> <DEDENT> def __call__(self, request): <NEW_LINE> <INDENT> hmac_hash = hmac.new(str(self.secret), str(self.message), sha1).hexdigest() <NEW_LINE> header = str(self.username) <NEW_LINE> header += ":" <NEW_LINE> header += hmac_hash <NEW_LINE> request.headers['Authorization'] = header <NEW_LINE> return request
Class defining HMAC authentication This extends the base authentication class from the requests package.
62599060097d151d1a2c270b
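HMACAuth plugs into requests like any other AuthBase subclass; a hedged sketch (the URL and credentials are invented, and the str()-based hashing above is Python 2 era, so bytes may be needed on Python 3):

    import requests

    auth = HMACAuth(username='alice', secret='shared-secret', message='payload-to-sign')
    response = requests.get('https://api.example.com/resource', auth=auth)
    # The request carries "Authorization: alice:<hex hmac-sha1 digest>"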
class ChannelBD(list): <NEW_LINE> <INDENT> def __init__(self, channel_id, server_id): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.serverId = server_id <NEW_LINE> self.channelId = channel_id
A ChannelBD is made of a channel id and a server id.
6259906029b78933be26ac12
class PersistentManager: <NEW_LINE> <INDENT> def save(key, value): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def get(): pass
Persist a variable. When a field is assigned a value, the value will be persisted across python runs # Create a pm, given an identifier (works like a scope for all assigned attributes). The same identifier will return the same python object across python runs. pm = PersistentManager('name') # Persist a value named 'start_date' with value '01/08/2018' pm.start_date = '01/08/2018' # Get the value named 'start_date' pm.start_date Maybe the default shelve already provides this functionality. An inner implementation might be: persist all data in the file system. Raises: access to a missing attribute should raise an error. That means you must first create an attribute. Problems: 1. Should updating an attribute be separated from creating an attribute.
625990601f037a2d8b9e53b9
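The class above is only a stub; one possible implementation of the behaviour its docstring describes, sketched with the standard-library shelve module (the class name and file layout are assumptions, not the author's design):

    import shelve


    class ShelvePersistentManager(object):
        # Hypothetical implementation: one shelve file per identifier

        def __init__(self, identifier):
            # Bypass __setattr__ for internal state
            object.__setattr__(self, '_path', '%s.persist' % identifier)

        def __setattr__(self, name, value):
            with shelve.open(self._path) as store:
                store[name] = value

        def __getattr__(self, name):
            with shelve.open(self._path) as store:
                try:
                    return store[name]
                except KeyError:
                    raise AttributeError(name)


    pm = ShelvePersistentManager('name')
    pm.start_date = '01/08/2018'
    print(pm.start_date)  # survives across python runs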
class ShapedTransport(ShapedConsumer): <NEW_LINE> <INDENT> iAmStreaming = False <NEW_LINE> def __getattr__(self, name): <NEW_LINE> <INDENT> return getattr(self.consumer, name)
Wraps a C{Transport} and shapes the rate at which it receives data. This is a L{ShapedConsumer} with a little bit of magic to provide for the case where the consumer it wraps is also a C{Transport} and people will be attempting to access attributes this does not proxy as a C{Consumer} (e.g. C{loseConnection}).
62599060b7558d5895464a7b
class QCellToolBarMergeCells(QtGui.QAction): <NEW_LINE> <INDENT> def __init__(self, icon, parent=None): <NEW_LINE> <INDENT> QtGui.QAction.__init__(self, icon, "&Merge cells", parent) <NEW_LINE> self.setStatusTip("Merge selected cells to a single cell if " "they are in consecutive poisitions") <NEW_LINE> self.setCheckable(True) <NEW_LINE> <DEDENT> def triggeredSlot(self): <NEW_LINE> <INDENT> if self.isChecked(): <NEW_LINE> <INDENT> sheet = self.toolBar.sheet <NEW_LINE> selectedCells = sorted(sheet.getSelectedLocations()) <NEW_LINE> topLeft = selectedCells[0] <NEW_LINE> bottomRight = selectedCells[-1] <NEW_LINE> sheet.setSpan(topLeft[0], topLeft[1], bottomRight[0]-topLeft[0]+1, bottomRight[1]-topLeft[1]+1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> sheet = self.toolBar.sheet <NEW_LINE> selectedCells = sorted(sheet.getSelectedLocations()) <NEW_LINE> for (row, col) in selectedCells: <NEW_LINE> <INDENT> sheet.setSpan(row, col, 1, 1) <NEW_LINE> <DEDENT> <DEDENT> sheet.clearSelection() <NEW_LINE> self.toolBar.updateToolBar() <NEW_LINE> <DEDENT> def updateStatus(self, info): <NEW_LINE> <INDENT> (sheet, row, col, cellWidget) = info <NEW_LINE> selectedCells = sorted(sheet.getSelectedLocations()) <NEW_LINE> if len(selectedCells)==0: <NEW_LINE> <INDENT> self.setVisible(False) <NEW_LINE> <DEDENT> elif len(selectedCells)==1: <NEW_LINE> <INDENT> showUp = False <NEW_LINE> if selectedCells[0]==(row, col): <NEW_LINE> <INDENT> span = sheet.getSpan(row, col) <NEW_LINE> if span[0]>1 or span[1]>1: <NEW_LINE> <INDENT> showUp = True <NEW_LINE> <DEDENT> <DEDENT> if showUp: <NEW_LINE> <INDENT> self.setChecked(True) <NEW_LINE> self.setVisible(True) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.setVisible(False) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> showUp = False <NEW_LINE> validRange = False <NEW_LINE> topLeft = selectedCells[0] <NEW_LINE> bottomRight = selectedCells[-1] <NEW_LINE> fullCount = (bottomRight[0]-topLeft[0]+1)*(bottomRight[1]-topLeft[1]+1) <NEW_LINE> validRange = len(selectedCells)==fullCount <NEW_LINE> if validRange: <NEW_LINE> <INDENT> showUp = True <NEW_LINE> for (r, c) in selectedCells: <NEW_LINE> <INDENT> span = sheet.getSpan(r, c) <NEW_LINE> if span[0]>1 or span[1]>1: <NEW_LINE> <INDENT> showUp = False <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if showUp: <NEW_LINE> <INDENT> self.setChecked(False) <NEW_LINE> self.setVisible(True) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.setVisible(False)
QCellToolBarMergeCells is the action to merge selected cells into a single cell if they are in consecutive positions
62599060e76e3b2f99fda09c
class LiteracyAndLanguageResultSet(ResultSet): <NEW_LINE> <INDENT> def get_Response(self): <NEW_LINE> <INDENT> return self._output.get('Response', None)
Retrieve the value for the "Response" output from this choreography execution. ((xml) The response from Donor's Choose)
62599060f7d966606f749407
@abstract <NEW_LINE> class ConnectableElement( _user_module.ConnectableElementMixin, TypedElement, ParameterableElement): <NEW_LINE> <INDENT> end = EReference(ordered=False, unique=True, containment=False, derived=True, upper=-1, transient=True, derived_class=_user_module.DerivedEnd) <NEW_LINE> def __init__(self, end=None, **kwargs): <NEW_LINE> <INDENT> super(ConnectableElement, self).__init__(**kwargs) <NEW_LINE> if end: <NEW_LINE> <INDENT> self.end.extend(end)
ConnectableElement is an abstract metaclass representing a set of instances that play roles of a StructuredClassifier. ConnectableElements may be joined by attached Connectors and specify configurations of linked instances to be created within an instance of the containing StructuredClassifier. <p>From package UML::StructuredClassifiers.</p>
6259906001c39578d7f14283
class TrainingCenterCreateView( LoginRequiredMixin, TrainingCenterMixin, CreateView): <NEW_LINE> <INDENT> context_object_name = 'trainingcenter' <NEW_LINE> template_name = 'training_center/create.html' <NEW_LINE> def get_success_url(self): <NEW_LINE> <INDENT> return reverse('certifyingorganisation-detail', kwargs={ 'project_slug': self.object.certifying_organisation.project.slug, 'slug': self.object.certifying_organisation.slug, }) <NEW_LINE> <DEDENT> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super( TrainingCenterCreateView, self).get_context_data(**kwargs) <NEW_LINE> context['trainingcenters'] = self.get_queryset() .filter(certifying_organisation=self.certifying_organisation) <NEW_LINE> context['organisation'] = self.certifying_organisation <NEW_LINE> return context <NEW_LINE> <DEDENT> def form_valid(self, form): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> super(TrainingCenterCreateView, self).form_valid(form) <NEW_LINE> return HttpResponseRedirect(self.get_success_url()) <NEW_LINE> <DEDENT> except IntegrityError: <NEW_LINE> <INDENT> return ValidationError( 'ERROR: Training Center by this name is already exists!') <NEW_LINE> <DEDENT> <DEDENT> def get_form_kwargs(self): <NEW_LINE> <INDENT> kwargs = super(TrainingCenterCreateView, self).get_form_kwargs() <NEW_LINE> self.organisation_slug = self.kwargs.get('organisation_slug', None) <NEW_LINE> self.certifying_organisation = CertifyingOrganisation.objects.get(slug=self.organisation_slug) <NEW_LINE> kwargs.update({ 'user': self.request.user, 'certifying_organisation': self.certifying_organisation }) <NEW_LINE> return kwargs
Create view for Training Center.
625990600fa83653e46f6584
class TestFiles(TestCase): <NEW_LINE> <INDENT> def assert_from_file(self, input, expected): <NEW_LINE> <INDENT> z = open("input.txt", "w") <NEW_LINE> for i in input: <NEW_LINE> <INDENT> z.write(str(i)) <NEW_LINE> z.write("\n") <NEW_LINE> <DEDENT> z.close() <NEW_LINE> parse_file("input.txt") <NEW_LINE> o = [] <NEW_LINE> with open("out.txt", "r") as lines: <NEW_LINE> <INDENT> for line in lines: <NEW_LINE> <INDENT> o.append(line.rstrip("\n")) <NEW_LINE> <DEDENT> <DEDENT> self.assertEqual(expected, o) <NEW_LINE> <DEDENT> def test_get_numbers(self): <NEW_LINE> <INDENT> self.assert_from_file(["1", "2", "3"], ["1", "2", "3"]) <NEW_LINE> <DEDENT> def test_get_numbers_intercalated(self): <NEW_LINE> <INDENT> self.assert_from_file(["1", "abc", "2", "abc2", "3"], ["1", "2", "3"]) <NEW_LINE> <DEDENT> def test_get_numbers_none(self): <NEW_LINE> <INDENT> self.assert_from_file(["abc", "abc2"], []) <NEW_LINE> <DEDENT> def test_get_numbers_ints(self): <NEW_LINE> <INDENT> self.assert_from_file(["abc", "abc2", 1, 2, 3], ["1", "2", "3"]) <NEW_LINE> <DEDENT> def test_get_numbers_floats(self): <NEW_LINE> <INDENT> self.assert_from_file(["abc", "1.0", "abc2", 2.0, 3], ["1.0", "2.0", "3"])
Tests from file.
6259906091af0d3eaad3b4c4
class PolarDBAccountModel(BaseModel): <NEW_LINE> <INDENT> model_name = 'POLARDB账号' <NEW_LINE> model_sign = 'polardb_account' <NEW_LINE> PASSWORD_PERMISSION = 'polardb-account-password' <NEW_LINE> polardb = models.ForeignKey(PolarDBModel, on_delete=models.CASCADE) <NEW_LINE> username = models.CharField('用户名', max_length=128) <NEW_LINE> password = models.CharField('密码', max_length=128) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> db_table = 'polardb_account' <NEW_LINE> <DEDENT> def to_dict(self, has_password=False): <NEW_LINE> <INDENT> data = super().to_dict() <NEW_LINE> if not has_password: <NEW_LINE> <INDENT> data['password'] = '******' <NEW_LINE> <DEDENT> return data
Database account
625990606e29344779b01ceb
class VolumeDriverCompatibility(test.TestCase): <NEW_LINE> <INDENT> def fake_update_cluster_status(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def setUp(self): <NEW_LINE> <INDENT> super(VolumeDriverCompatibility, self).setUp() <NEW_LINE> self.manager = importutils.import_object(CONF.volume_manager) <NEW_LINE> self.context = context.get_admin_context() <NEW_LINE> <DEDENT> def _load_driver(self, driver): <NEW_LINE> <INDENT> self.manager.__init__(volume_driver=driver) <NEW_LINE> <DEDENT> def _driver_module_name(self): <NEW_LINE> <INDENT> return "%s.%s" % (self.manager.driver.__class__.__module__, self.manager.driver.__class__.__name__) <NEW_LINE> <DEDENT> def test_storwize_svc_old(self): <NEW_LINE> <INDENT> self._load_driver( 'cinder.volume.drivers.storwize_svc.StorwizeSVCDriver') <NEW_LINE> self.assertEqual(self._driver_module_name(), STORWIZE_MODULE) <NEW_LINE> <DEDENT> def test_storwize_svc_old2(self): <NEW_LINE> <INDENT> self._load_driver('cinder.volume.drivers.storwize_svc.' 'StorwizeSVCDriver') <NEW_LINE> self.assertEqual(self._driver_module_name(), STORWIZE_MODULE) <NEW_LINE> <DEDENT> def test_storwize_svc_new(self): <NEW_LINE> <INDENT> self._load_driver(STORWIZE_MODULE) <NEW_LINE> self.assertEqual(self._driver_module_name(), STORWIZE_MODULE) <NEW_LINE> <DEDENT> def test_hp_lefthand_rest_old(self): <NEW_LINE> <INDENT> self._load_driver( 'cinder.volume.drivers.san.hp_lefthand.HpSanISCSIDriver') <NEW_LINE> self.assertEqual(self._driver_module_name(), LEFTHAND_REST_MODULE) <NEW_LINE> <DEDENT> def test_hp_lefthand_rest_new(self): <NEW_LINE> <INDENT> self._load_driver(LEFTHAND_REST_MODULE) <NEW_LINE> self.assertEqual(self._driver_module_name(), LEFTHAND_REST_MODULE) <NEW_LINE> <DEDENT> def test_gpfs_old(self): <NEW_LINE> <INDENT> self._load_driver('cinder.volume.drivers.gpfs.GPFSDriver') <NEW_LINE> self.assertEqual(self._driver_module_name(), GPFS_MODULE) <NEW_LINE> <DEDENT> def test_gpfs_new(self): <NEW_LINE> <INDENT> self._load_driver(GPFS_MODULE) <NEW_LINE> self.assertEqual(self._driver_module_name(), GPFS_MODULE)
Test backwards compatibility for volume drivers.
625990604e4d562566373aa4
class BaseScript(object): <NEW_LINE> <INDENT> __metaclass__ = abc.ABCMeta <NEW_LINE> script_active = True <NEW_LINE> @abc.abstractmethod <NEW_LINE> def run(self): <NEW_LINE> <INDENT> pass
All scripts should inherit this type!
6259906056ac1b37e6303835
class InvalidOrderError(OrderError): <NEW_LINE> <INDENT> pass
Exceptions regarding invalid order amounts
62599060442bda511e95d8a8
class FileManager(models.Manager): <NEW_LINE> <INDENT> def get_or_create_for_object(self, obj, file_path, url=None): <NEW_LINE> <INDENT> ctype = ContentType.objects.get_for_model(obj) <NEW_LINE> file, new = self.get_or_create( path = file_path, defaults = { 'content_type': ctype, 'object_id': obj.id, 'url': url, } ) <NEW_LINE> return file
Default manager for File model class
62599060627d3e7fe0e08528
class TypeCompiler(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def buildtype(self): <NEW_LINE> <INDENT> baT = (AbstractCut, RestrictionType) <NEW_LINE> cuT = (NoCut, OneCut, TwoCuts) <NEW_LINE> meT = (Meth_Dep, Meth_Undep) <NEW_LINE> paT = (Palindromic, NonPalindromic) <NEW_LINE> ovT = (Unknown, Blunt, Ov5, Ov3) <NEW_LINE> deT = (NotDefined, Defined, Ambiguous) <NEW_LINE> coT = (Commercially_available, Not_available) <NEW_LINE> types = [ (p, c, o, d, m, co, baT[0], baT[1]) for p in paT for c in cuT for o in ovT for d in deT for m in meT for co in coT ] <NEW_LINE> n = 1 <NEW_LINE> for ty in types: <NEW_LINE> <INDENT> dct = {} <NEW_LINE> for t in ty: <NEW_LINE> <INDENT> dct.update(t.__dict__) <NEW_LINE> dct["results"] = [] <NEW_LINE> dct["substrat"] = "DNA" <NEW_LINE> dct["dna"] = None <NEW_LINE> if t == NoCut: <NEW_LINE> <INDENT> dct.update( { "fst5": None, "fst3": None, "scd5": None, "scd3": None, "ovhg": None, "ovhgseq": None, } ) <NEW_LINE> <DEDENT> elif t == OneCut: <NEW_LINE> <INDENT> dct.update({"scd5": None, "scd3": None}) <NEW_LINE> <DEDENT> <DEDENT> class klass(type): <NEW_LINE> <INDENT> def __new__(cls): <NEW_LINE> <INDENT> return type.__new__(cls, "type%i" % n, ty, dct) <NEW_LINE> <DEDENT> def __init__(cls): <NEW_LINE> <INDENT> super(klass, cls).__init__("type%i" % n, ty, dct) <NEW_LINE> <DEDENT> <DEDENT> yield klass() <NEW_LINE> n += 1
Build the different types possible for Restriction Enzymes.
625990603539df3088ecd93a
class AccessReason(enum.IntEnum): <NEW_LINE> <INDENT> UNSPECIFIED = 0 <NEW_LINE> UNKNOWN = 1 <NEW_LINE> OWNED = 2 <NEW_LINE> SHARED = 3 <NEW_LINE> LICENSED = 4 <NEW_LINE> SUBSCRIBED = 5 <NEW_LINE> AFFILIATED = 6
Enum describing possible access reasons. Attributes: UNSPECIFIED (int): Not specified. UNKNOWN (int): Used for return value only. Represents value unknown in this version. OWNED (int): The resource is owned by the user. SHARED (int): The resource is shared to the user. LICENSED (int): The resource is licensed to the user. SUBSCRIBED (int): The user subscribed to the resource. AFFILIATED (int): The resource is accessible to the user.
62599060fff4ab517ebceec4
class TemporalInformationStore(object): <NEW_LINE> <INDENT> __metaclass__ = ABCMeta <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.queue = Queue.Queue() <NEW_LINE> <DEDENT> def get(self, timeout = None): <NEW_LINE> <INDENT> if timeout is None: <NEW_LINE> <INDENT> real_timeout = 0.05 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> real_timeout = timeout <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> return self.queue.get(True, real_timeout) <NEW_LINE> <DEDENT> except Queue.Empty: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def empty(self): <NEW_LINE> <INDENT> return self.queue.empty() <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def put(self, *args, **kwargs): <NEW_LINE> <INDENT> pass
Temporal synchronized store for initial and finishing information. The coordinator will be asking the experiment whether it has finished or not. This process is not synchronized with the UserProcessingManager, nor even with the user session (if an experiment takes a long time and the user session has finished, we still want to store the results of that experiment), so this store will hold the information in the meantime. From time to time, the UserProcessingManager will call this store to ask for information to store in the database. This class provides a synchronized solution, so the UPS is able to block until some information is available.
625990608e7ae83300eea72b
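TemporalInformationStore leaves put() abstract; a minimal hypothetical subclass just to show the intended put/get flow (the field names are invented, not WebLab's real ones):

    class FinishedInformationStore(TemporalInformationStore):
        # Hypothetical concrete store: queues (experiment_id, result) pairs
        def put(self, experiment_id, result):
            self.queue.put((experiment_id, result))

    store = FinishedInformationStore()
    store.put('exp-1', {'status': 'finished'})  # experiment side
    entry = store.get(timeout=0.1)              # processing side; None if nothing arrived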
class RegisterClientForm(Form): <NEW_LINE> <INDENT> title = wtf.TextField('Application title', validators=[wtf.Required()], description="The name of your application") <NEW_LINE> description = wtf.TextAreaField('Description', validators=[wtf.Required()], description="A description to help users recognize your application") <NEW_LINE> client_owner = wtf.RadioField('Owner', validators=[wtf.Required()], description="User or organization that owns this application. Changing the owner " "will revoke all currently assigned permissions for this app") <NEW_LINE> website = wtf.html5.URLField('Application website', validators=[wtf.Required(), wtf.URL()], description="Website where users may access this application") <NEW_LINE> redirect_uri = wtf.html5.URLField('Redirect URI', validators=[wtf.Optional(), wtf.URL()], description="OAuth2 Redirect URI") <NEW_LINE> notification_uri = wtf.html5.URLField('Notification URI', validators=[wtf.Optional(), wtf.URL()], description="Lastuser resource provider Notification URI. When another application requests access to " "resources provided by this app, Lastuser will post a notice to this URI with a copy of the access " "token that was provided to the other application. Other notices may be posted too " "(not yet implemented)") <NEW_LINE> iframe_uri = wtf.html5.URLField('IFrame URI', validators=[wtf.Optional(), wtf.URL()], description="Front-end notifications URL. This is loaded in a hidden iframe to notify the app that the " "user updated their profile in some way (not yet implemented)") <NEW_LINE> resource_uri = wtf.html5.URLField('Resource URI', validators=[wtf.Optional(), wtf.URL()], description="URI at which this application provides resources as per the Lastuser Resource API " "(not yet implemented)") <NEW_LINE> allow_any_login = wtf.BooleanField('Allow anyone to login', default=True, description="If your application requires access to be restricted to specific users, uncheck this, " "and only users who have been assigned a permission to the app will be able to login") <NEW_LINE> team_access = wtf.BooleanField('Requires access to teams', default=False, description="If your application is capable of assigning access permissions to teams, check this. " "Organization owners will then able to grant access to teams in their organizations") <NEW_LINE> def validate_client_owner(self, field): <NEW_LINE> <INDENT> if field.data == g.user.userid: <NEW_LINE> <INDENT> self.user = g.user <NEW_LINE> self.org = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> orgs = [org for org in g.user.organizations_owned() if org.userid == field.data] <NEW_LINE> if len(orgs) != 1: <NEW_LINE> <INDENT> raise wtf.ValidationError("Invalid owner") <NEW_LINE> <DEDENT> self.user = None <NEW_LINE> self.org = orgs[0]
Register a new OAuth client application
625990605166f23b2e244a6f
class TaskDispatcher: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.sequencer = Sequencer() <NEW_LINE> self.tick = 0.05 <NEW_LINE> self.status = "Stopped" <NEW_LINE> pub.subscribe(self.process_cmd_queue, 'Command') <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> if self.status == "Running": <NEW_LINE> <INDENT> self.sequencer.run_once() <NEW_LINE> Timer(self.tick, self.run).start() <NEW_LINE> <DEDENT> <DEDENT> def process_cmd_queue(self, cmd, data): <NEW_LINE> <INDENT> self.process_sequencer_commands(cmd, data) <NEW_LINE> <DEDENT> def process_sequencer_commands(self, cmd, data): <NEW_LINE> <INDENT> if cmd == "Seq_Start": <NEW_LINE> <INDENT> self.sequencer.status = "Running" <NEW_LINE> return True <NEW_LINE> <DEDENT> if cmd == "Seq_Pause": <NEW_LINE> <INDENT> self.sequencer.status = "Paused" <NEW_LINE> return True <NEW_LINE> <DEDENT> if cmd == "Seq_Stop": <NEW_LINE> <INDENT> self.sequencer.status = "Stopped" <NEW_LINE> return True <NEW_LINE> <DEDENT> if cmd == "Seq_Load_Tests": <NEW_LINE> <INDENT> self.sequencer.load(data) <NEW_LINE> return True <NEW_LINE> <DEDENT> if cmd == "Seq_Clear_Tests": <NEW_LINE> <INDENT> self.sequencer.clear_tests() <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> self.status = "Running" <NEW_LINE> <DEDENT> def stop(self): <NEW_LINE> <INDENT> self.status = "Stopped"
This class listens on the cmd queue and dispatches the data and commands to the relevant functions. It is the main running loop in the software: it receives input from the command queue and distributes the results.
62599060adb09d7d5dc0bc08
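Commands reach TaskDispatcher through the 'Command' topic; a hedged sketch of driving it, assuming the pypubsub keyword-argument protocol matches the pub.subscribe call above and that Sequencer/Timer come from the same module:

    from pubsub import pub  # assumed to be the pub object used by TaskDispatcher

    dispatcher = TaskDispatcher()
    dispatcher.start()  # status -> "Running"
    dispatcher.run()    # kicks off the 0.05 s Timer loop

    # The listener signature is (cmd, data), so commands are published as keyword args
    pub.sendMessage('Command', cmd='Seq_Load_Tests', data=['smoke_test', 'io_test'])
    pub.sendMessage('Command', cmd='Seq_Start', data=None)
    pub.sendMessage('Command', cmd='Seq_Stop', data=None)
    dispatcher.stop()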
class CardBlock(blocks.StructBlock): <NEW_LINE> <INDENT> title = blocks.CharBlock(required=False, help_text="Add your title") <NEW_LINE> cards = blocks.ListBlock( blocks.StructBlock( [ ("image", ImageChooserBlock(required=False)), ("title", blocks.CharBlock(required=False,max_length=40)), ("text", blocks.TextBlock(required=False,max_length=500)), ("rich_text", blocks.RichTextBlock(required=False)), ("button_page", blocks.PageChooserBlock(required=False)), ( "button_url", blocks.URLBlock( required=False, help_text="If the button page above is selected, that will be used first.", ), ), ] ) ) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> template = "streams/card_block.html" <NEW_LINE> icon = "placeholder" <NEW_LINE> label = "Staff Cards"
Cards with image and text and button(s).
62599060796e427e5384fe14
class OnkyoDevice(MediaPlayerDevice): <NEW_LINE> <INDENT> def __init__(self, receiver, sources, name=None): <NEW_LINE> <INDENT> self._receiver = receiver <NEW_LINE> self._muted = False <NEW_LINE> self._volume = 0 <NEW_LINE> self._pwstate = STATE_OFF <NEW_LINE> self._name = name or '{}_{}'.format( receiver.info['model_name'], receiver.info['identifier']) <NEW_LINE> self._current_source = None <NEW_LINE> self._source_list = list(sources.values()) <NEW_LINE> self._source_mapping = sources <NEW_LINE> self._reverse_mapping = {value: key for key, value in sources.items()} <NEW_LINE> self.update() <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> status = self._receiver.command('system-power query') <NEW_LINE> if status[1] == 'on': <NEW_LINE> <INDENT> self._pwstate = STATE_ON <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._pwstate = STATE_OFF <NEW_LINE> return <NEW_LINE> <DEDENT> volume_raw = self._receiver.command('volume query') <NEW_LINE> mute_raw = self._receiver.command('audio-muting query') <NEW_LINE> current_source_raw = self._receiver.command('input-selector query') <NEW_LINE> for source in current_source_raw[1]: <NEW_LINE> <INDENT> if source in self._source_mapping: <NEW_LINE> <INDENT> self._current_source = self._source_mapping[source] <NEW_LINE> break <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._current_source = '_'.join( [i for i in current_source_raw[1]]) <NEW_LINE> <DEDENT> <DEDENT> self._muted = bool(mute_raw[1] == 'on') <NEW_LINE> self._volume = int(volume_raw[1], 16) / 80.0 <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @property <NEW_LINE> def state(self): <NEW_LINE> <INDENT> return self._pwstate <NEW_LINE> <DEDENT> @property <NEW_LINE> def volume_level(self): <NEW_LINE> <INDENT> return self._volume <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_volume_muted(self): <NEW_LINE> <INDENT> return self._muted <NEW_LINE> <DEDENT> @property <NEW_LINE> def supported_media_commands(self): <NEW_LINE> <INDENT> return SUPPORT_ONKYO <NEW_LINE> <DEDENT> @property <NEW_LINE> def source(self): <NEW_LINE> <INDENT> return self._current_source <NEW_LINE> <DEDENT> @property <NEW_LINE> def source_list(self): <NEW_LINE> <INDENT> return self._source_list <NEW_LINE> <DEDENT> def turn_off(self): <NEW_LINE> <INDENT> self._receiver.command('system-power standby') <NEW_LINE> <DEDENT> def set_volume_level(self, volume): <NEW_LINE> <INDENT> self._receiver.command('volume {}'.format(int(volume*80))) <NEW_LINE> <DEDENT> def mute_volume(self, mute): <NEW_LINE> <INDENT> if mute: <NEW_LINE> <INDENT> self._receiver.command('audio-muting on') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._receiver.command('audio-muting off') <NEW_LINE> <DEDENT> <DEDENT> def turn_on(self): <NEW_LINE> <INDENT> self._receiver.power_on() <NEW_LINE> <DEDENT> def select_source(self, source): <NEW_LINE> <INDENT> if source in self._source_list: <NEW_LINE> <INDENT> source = self._reverse_mapping[source] <NEW_LINE> <DEDENT> self._receiver.command('input-selector {}'.format(source))
Representation of an Onkyo device.
62599060b7558d5895464a7c
class Resource(object): <NEW_LINE> <INDENT> TIME_FIELDS = { 'public_date', 'modified_date', 'updated', 'issued' } <NEW_LINE> def __init__(self, name, body=None): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self._name = name <NEW_LINE> self.raw = body <NEW_LINE> self._body = body <NEW_LINE> self.load() <NEW_LINE> <DEDENT> def load(self): <NEW_LINE> <INDENT> if not isinstance(self.raw, dict): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> self._body = self.raw.copy() <NEW_LINE> for key, value in self.raw.items(): <NEW_LINE> <INDENT> self._set_key(key, value) <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def _set_key(self, key, value): <NEW_LINE> <INDENT> if value and key in self.TIME_FIELDS: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> value = iso8601.parse_date(value) <NEW_LINE> <DEDENT> except iso8601.ParseError as err: <NEW_LINE> <INDENT> raise APIException('Attribute "{}": {}'.format(key, err), self.raw) <NEW_LINE> <DEDENT> self._body[key] = value <NEW_LINE> <DEDENT> setattr(self, key, value) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self._body) <NEW_LINE> <DEDENT> def __getattr__(self, attr): <NEW_LINE> <INDENT> return getattr(self._body, attr) <NEW_LINE> <DEDENT> def __getitem__(self, item): <NEW_LINE> <INDENT> return self._body[item] <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self._body) <NEW_LINE> <DEDENT> def __contains__(self, item): <NEW_LINE> <INDENT> return item in self._body <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<Resource {}>'.format(self._name)
Holds processed part of response data. Args: name: Name of the resource (e.g. bash-0:4.2.46-20.el7_2.x86_64) body: Resource data
625990601f037a2d8b9e53ba
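A small usage sketch for Resource; it needs the iso8601 package (used for the TIME_FIELDS above), and the payload here is invented:

    body = {
        'name': 'bash-0:4.2.46-20.el7_2.x86_64',
        'issued': '2016-02-16T00:00:00Z',  # parsed into a datetime by _set_key
        'cvss_score': 4.6,
    }
    res = Resource('bash-0:4.2.46-20.el7_2.x86_64', body)
    print(res.name, res.issued.year)  # attribute access
    print(res['cvss_score'])          # dict-style access via __getitem__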
class DBLoadTable(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.db = db() <NEW_LINE> self.db.create_database(database_id="test_load_table") <NEW_LINE> self.db.create_schema(database_id="test_load_table", schema_id="test") <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> self.db.delete_schema(database_id="test_load_table", schema_id="test") <NEW_LINE> self.db.delete_database(database_id="test_load_table") <NEW_LINE> self.db.close() <NEW_LINE> <DEDENT> def test_load_table_too_many_arguments(self): <NEW_LINE> <INDENT> with self.assertRaises(Exception): <NEW_LINE> <INDENT> self.db.load_table( database_id="test_load_table", schema_id="test", table_id="test_load_table_too_many_arguments", query="query", file_query="path_to_query" ) <NEW_LINE> <DEDENT> <DEDENT> def test_load_table_no_arguments(self): <NEW_LINE> <INDENT> with self.assertRaises(Exception): <NEW_LINE> <INDENT> self.db.load_table( database_id="test_load_table", schema_id="test", table_id="test_load_table_no_arguments" ) <NEW_LINE> <DEDENT> <DEDENT> def test_load_table_table_does_not_exists_no_ddl(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_load_table_table_does_not_exists_with_ddl(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_load_table_table_exists_and_truncate(self): <NEW_LINE> <INDENT> self.db.query_exec(query="create table test_load_table.test.test_load_table_table_exists_and_truncate as select 1 as col1") <NEW_LINE> self.db.load_table( database_id="test_load_table", schema_id="test", table_id="test_load_table_table_exists_and_truncate", query="select 2 as col1", truncate=True ) <NEW_LINE> expected_df = pd.DataFrame(data=[2], columns = ['col1']) <NEW_LINE> returned_df = self.db.query_exec(query="select * from test_load_table.test.test_load_table_table_exists_and_truncate", return_df=True) <NEW_LINE> df_assert_equal(expected_df, returned_df) <NEW_LINE> <DEDENT> def test_load_table_table_exists_no_truncate(self): <NEW_LINE> <INDENT> self.db.query_exec(query="create table test_load_table.test.test_load_table_table_exists_no_truncate as select 1 as col1") <NEW_LINE> self.db.load_table( database_id="test_load_table", schema_id="test", table_id="test_load_table_table_exists_no_truncate", query="select 2 as col1", truncate=False ) <NEW_LINE> expected_df = pd.DataFrame(data=[1,2], columns = ['col1']) <NEW_LINE> returned_df = self.db.query_exec(query="select * from test_load_table.test.test_load_table_table_exists_no_truncate", return_df=True) <NEW_LINE> df_assert_equal(expected_df, returned_df)
Test
62599060379a373c97d9a6c2
class NoSuchBranch(NameLookupFailed): <NEW_LINE> <INDENT> _message_prefix = "No such branch"
Raised when we try to load a branch that does not exist.
62599060097d151d1a2c270e
class Settings(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.screen_width = 800 <NEW_LINE> self.screen_height = 600 <NEW_LINE> self.bg_color = (230, 230, 230) <NEW_LINE> self.ship_speed_factor = 1.5 <NEW_LINE> self.ship_limit = 3 <NEW_LINE> self.bullet_speed_factor = 1 <NEW_LINE> self.bullet_width = 300 <NEW_LINE> self.bullet_height = 15 <NEW_LINE> self.bullet_color = 60, 60, 60 <NEW_LINE> self.bullets_allowed = 3 <NEW_LINE> self.alien_speed_factor = 1 <NEW_LINE> self.fleet_drop_speed = 10 <NEW_LINE> self.fleet_direction = 1
A class to store all settings for Alien Invasion
625990600c0af96317c578ae
class CfgParameterDialog(ParameterDialog): <NEW_LINE> <INDENT> def _create_additional_controls(self): <NEW_LINE> <INDENT> self.add_to_layout(wx.TextCtrl(self, validator=TextValidator(self, "verif"), size=(150, 26)), "Regular expression") <NEW_LINE> self.add_to_layout(wx.TextCtrl(self, validator=TextValidator(self, "default"), size=(150, 26)), "Default value") <NEW_LINE> <DEDENT> def __init__(self, parent=None, paramname="", paramtype="num", bitsize=8, verif="", default=""): <NEW_LINE> <INDENT> ParameterDialog.__init__(self, parent, paramname, paramtype, "out", bitsize) <NEW_LINE> self.verif = verif <NEW_LINE> self.default = default <NEW_LINE> self.create_controls() <NEW_LINE> self._create_additional_controls() <NEW_LINE> self.add_okcancel_buttons() <NEW_LINE> self.dolayout() <NEW_LINE> self.Fit()
Editor for configuration parameters
625990608e71fb1e983bd169
class FastSheet(SheetReader): <NEW_LINE> <INDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._native_sheet.title <NEW_LINE> <DEDENT> def row_iterator(self): <NEW_LINE> <INDENT> for row in self._native_sheet.rows: <NEW_LINE> <INDENT> yield row <NEW_LINE> <DEDENT> <DEDENT> def column_iterator(self, row): <NEW_LINE> <INDENT> for cell in row: <NEW_LINE> <INDENT> yield cell.value
Iterate through rows
625990608e7ae83300eea72c
class StatusFrame(tk.Frame): <NEW_LINE> <INDENT> rtdDefaults = { } <NEW_LINE> def __init__(self, parent, **kwargs): <NEW_LINE> <INDENT> kwargs =dict(self.rtdDefaults, **kwargs) <NEW_LINE> tk.Frame.__init__(self, parent, **kwargs) <NEW_LINE> self.dataCom = parent.dataCom <NEW_LINE> self.statusText = tk.Label(self, text="") <NEW_LINE> self.statusText.pack() <NEW_LINE> self.time = 0.0 <NEW_LINE> <DEDENT> def rtdUpdate(self): <NEW_LINE> <INDENT> dataCom = self.dataCom <NEW_LINE> thistime = time.time() <NEW_LINE> elpased = thistime-self.time if self.time else 0.0 <NEW_LINE> text = time.strftime("%Y-%m-%dT%H:%M:%S",time.localtime())+" - " <NEW_LINE> if not dataCom.isDataValid(): <NEW_LINE> <INDENT> text += "Waiting for scan data Elpased time %.2f"%(elpased) <NEW_LINE> <DEDENT> elif dataCom.data.get("TEST.NEW", False): <NEW_LINE> <INDENT> text += "Configuration Changed. Reploting...." <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> text += "%d Scan - Refresh %.2f sec"%(dataCom.config["N.OPL"],elpased) <NEW_LINE> <DEDENT> self.statusText.config(text=text) <NEW_LINE> self.time = thistime <NEW_LINE> if U: <NEW_LINE> <INDENT> self.update()
A small Frame showing the status of the scope
62599060fff4ab517ebceec6
class LinuxConfiguration(Model): <NEW_LINE> <INDENT> _attribute_map = { 'disable_password_authentication': {'key': 'disablePasswordAuthentication', 'type': 'bool'}, 'ssh': {'key': 'ssh', 'type': 'SshConfiguration'}, } <NEW_LINE> def __init__(self, disable_password_authentication=None, ssh=None): <NEW_LINE> <INDENT> super(LinuxConfiguration, self).__init__() <NEW_LINE> self.disable_password_authentication = disable_password_authentication <NEW_LINE> self.ssh = ssh
Specifies the Linux operating system settings on the virtual machine. <br><br>For a list of supported Linux distributions, see [Linux on Azure-Endorsed Distributions](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-linux-endorsed-distros?toc=%2fazure%2fvirtual-machines%2flinux%2ftoc.json) <br><br> For running non-endorsed distributions, see [Information for Non-Endorsed Distributions](https://docs.microsoft.com/azure/virtual-machines/virtual-machines-linux-create-upload-generic?toc=%2fazure%2fvirtual-machines%2flinux%2ftoc.json). :param disable_password_authentication: Specifies whether password authentication should be disabled. :type disable_password_authentication: bool :param ssh: Specifies the ssh key configuration for a Linux OS. :type ssh: ~azure.mgmt.compute.v2017_03_30.models.SshConfiguration
62599060498bea3a75a5914e
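Building the model above is just keyword arguments; a hedged sketch (SshConfiguration/SshPublicKey are the sibling models referenced in the docstring, but their exact fields here and the key material are assumptions):

    linux_config = LinuxConfiguration(
        disable_password_authentication=True,  # key-based login only
        ssh=SshConfiguration(public_keys=[
            SshPublicKey(path='/home/azureuser/.ssh/authorized_keys',  # conventional location
                         key_data='ssh-rsa AAAA... placeholder'),
        ]),
    )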