code: string, lengths 4 to 4.48k
docstring: string, lengths 1 to 6.45k
_id: string, length 24
class Layer(object): <NEW_LINE> <INDENT> def __init__(self, parms = [None, None, None]): <NEW_LINE> <INDENT> super(Layer, self).__init__() <NEW_LINE> self.W, self.b, self.activation = parms <NEW_LINE> <DEDENT> def _set(self, parms): <NEW_LINE> <INDENT> self.W, self.b, self.activation = parms <NEW_LINE> <DEDENT> def _fire(self, M): <NEW_LINE> <INDENT> return self.activation(self.W.dot(M) + self.b)
Layer class which will hold W, b, and the activation function. Only users planning on extending functionality should modify or interact with this class.
6259905d16aa5153ce401b22
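A minimal usage sketch for the Layer entry above, assuming NumPy and that the class is importable as written; the weight shapes and the tanh activation are illustrative only.

```python
import numpy as np

# Hypothetical shapes: 3 inputs -> 2 outputs, batch of 4 column vectors.
W = np.random.randn(2, 3)
b = np.random.randn(2, 1)
layer = Layer(parms=[W, b, np.tanh])   # assumes the Layer class above is importable

M = np.random.randn(3, 4)              # inputs as columns
out = layer._fire(M)                    # activation(W.dot(M) + b)
print(out.shape)                        # (2, 4)
```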
class Strength(object): <NEW_LINE> <INDENT> def __init__(self, valid, strength, message): <NEW_LINE> <INDENT> self.valid = valid <NEW_LINE> self.strength = strength <NEW_LINE> self.message = message <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.strength <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.message <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return self.message <NEW_LINE> <DEDENT> def __nonzero__(self): <NEW_LINE> <INDENT> return self.valid <NEW_LINE> <DEDENT> def __bool__(self): <NEW_LINE> <INDENT> return self.valid
Measure the strength of a password. Here are some common usages of strength:: >>> strength = Strength(True, 'strong', 'password is perfect') >>> bool(strength) True >>> repr(strength) 'strong' >>> str(strength) 'password is perfect' :param valid: if the password is valid to use :param strength: the strength level of the password :param message: a message related to the password
6259905d3617ad0b5ee0778b
class Map: <NEW_LINE> <INDENT> pass
Provides waypoints and other map data. Class Tests: >>> instance = Map()
6259905dcc0a2c111447c5ee
class TestMaxAverageII(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.test_object = [{ 'test_nums': [1,12,-5,-6,50,3], 'test_k': 4, 'test_output': 12.75 }, { 'test_nums': [1, 2, 3, 4], 'test_k': 1, 'test_output': 4 }, { 'test_nums': [5], 'test_k': 1, 'test_output': 5 }] <NEW_LINE> <DEDENT> def test_result(self): <NEW_LINE> <INDENT> obj = maxAverageII() <NEW_LINE> for test_case in self.test_object: <NEW_LINE> <INDENT> result = obj.findMaxAverage(test_case['test_nums'], test_case['test_k']) <NEW_LINE> self.assertEqual(test_case['test_output'], result)
Regression test for maxAverageII.findMaxAverage.
6259905d442bda511e95d879
class SearchFolderError(Exception): <NEW_LINE> <INDENT> pass
Raised when searching for an extra item fails.
6259905d10dbd63aa1c7219a
class OtterKeymaster(object): <NEW_LINE> <INDENT> def __init__(self, host="localhost", port=9160, setup_generator=None): <NEW_LINE> <INDENT> self.host = host <NEW_LINE> self.port = port <NEW_LINE> self.setup_generator = ( setup_generator or CQLGenerator(schema_dir + '/setup')) <NEW_LINE> self._keys = {} <NEW_LINE> self.cluster = RunningCassandraCluster( host=host, port=port, setup_cql=(setup_generator or CQLGenerator(schema_dir + '/setup').generate_cql)) <NEW_LINE> <DEDENT> def get_keyspace(self, keyspace_name=None): <NEW_LINE> <INDENT> keyspace_name = keyspace_name or ('a' + uuid.uuid4().hex) <NEW_LINE> if not keyspace_name in self._keys: <NEW_LINE> <INDENT> self._keys[keyspace_name] = KeyspaceWithClient( self.cluster, keyspace_name) <NEW_LINE> <DEDENT> return self._keys[keyspace_name]
Object that keeps track of created keyspaces and PausableSilverbergClients, and acts as a factory for PausableSilverbergClients.
6259905d7cff6e4e811b7084
class ShortestSeekFirstQueue(Queue): <NEW_LINE> <INDENT> def __init__(self, sort_by='id', queue=None, sort_first_obj=True): <NEW_LINE> <INDENT> self.sort_by = sort_by <NEW_LINE> self._queue = [] <NEW_LINE> self.sort_first_obj = sort_first_obj <NEW_LINE> if queue is not None: <NEW_LINE> <INDENT> self._queue = queue <NEW_LINE> self._sort_by_field(self.sort_by) <NEW_LINE> <DEDENT> <DEDENT> def enqueue(self, obj): <NEW_LINE> <INDENT> self._queue.append(obj) <NEW_LINE> self._sort_by_field(self.sort_by) <NEW_LINE> <DEDENT> def dequeue(self): <NEW_LINE> <INDENT> if self._queue: <NEW_LINE> <INDENT> return self._queue.pop(0) <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def _sort_by_field(self, field_name): <NEW_LINE> <INDENT> if self.sort_first_obj: <NEW_LINE> <INDENT> self._queue.sort(key=lambda obj: getattr(obj, field_name)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> head, tail = self._queue[0], self._queue[1:] <NEW_LINE> self._queue = [head] + sorted(tail)
Base interface for a shortest-seek-first (SSF) queue backed by a Python list.
6259905d21a7993f00c675ac
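A standalone sketch of the sort-on-enqueue behaviour described above, assuming plain objects with an `id` attribute (the default `sort_by` field); it does not depend on the Queue base class.

```python
from types import SimpleNamespace

queue = []
for ident in (5, 1, 3):
    queue.append(SimpleNamespace(id=ident))
    queue.sort(key=lambda obj: obj.id)   # re-sorted after every enqueue
print([obj.id for obj in queue])         # [1, 3, 5]
print(queue.pop(0).id)                   # dequeue -> 1
```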
class Point: <NEW_LINE> <INDENT> def __init__(self, x: int, y: int): <NEW_LINE> <INDENT> if not isinstance(x, int) or not isinstance(y, int): <NEW_LINE> <INDENT> raise TypeError <NEW_LINE> <DEDENT> self.x = x <NEW_LINE> self.y = y <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return str.format('({}, {})', self.x, self.y) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return str(self)
Implements a 'point' class.
6259905d435de62698e9d444
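A minimal usage sketch for the Point entry above, assuming the class is importable as written.

```python
p = Point(3, 4)
print(p)            # (3, 4)
print(repr(p))      # (3, 4)
try:
    Point(1.5, 2)   # non-integer coordinates are rejected
except TypeError:
    print("TypeError raised for non-integer input")
```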
class PreExecuteNotebooksCommand(Command): <NEW_LINE> <INDENT> description = "Pre-executes Jupyther notebooks included in the documentation" <NEW_LINE> user_options = [ ('notebooks=', 'n', "patterns to match (i.e. 'protocols/SAPDiag*')"), ] <NEW_LINE> def initialize_options(self): <NEW_LINE> <INDENT> self.notebooks = None <NEW_LINE> <DEDENT> def finalize_options(self): <NEW_LINE> <INDENT> base_path = "docs/" <NEW_LINE> if self.notebooks: <NEW_LINE> <INDENT> self.notebooks = glob(base_path + self.notebooks) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.notebooks = glob(base_path + "protocols/*.ipynb") <NEW_LINE> self.notebooks.extend(glob(base_path + "fileformats/*.ipynb")) <NEW_LINE> <DEDENT> <DEDENT> def run(self): <NEW_LINE> <INDENT> for notebook in self.notebooks: <NEW_LINE> <INDENT> system("jupyter nbconvert --inplace --to notebook --execute {}".format(notebook))
Custom command for pre-executing Jupyter notebooks included in the documentation.
6259905dd99f1b3c44d06ce1
class MyTopo( Topo ): <NEW_LINE> <INDENT> def __init__( self ): <NEW_LINE> <INDENT> Topo.__init__( self ) <NEW_LINE> Host1 = self.addHost('h1') <NEW_LINE> Host2 = self.addHost('h2') <NEW_LINE> s1 = self.addSwitch('s1') <NEW_LINE> s2 = self.addSwitch('s2') <NEW_LINE> s3 = self.addSwitch('s3') <NEW_LINE> s4 = self.addSwitch('s4') <NEW_LINE> s5 = self.addSwitch('s5') <NEW_LINE> s6 = self.addSwitch('s6') <NEW_LINE> s7 = self.addSwitch('s7') <NEW_LINE> s8 = self.addSwitch('s8') <NEW_LINE> s9 = self.addSwitch('s9') <NEW_LINE> self.addLink( Host1, s1 ) <NEW_LINE> self.addLink( s1, s2 ) <NEW_LINE> self.addLink( s2, s3 ) <NEW_LINE> self.addLink( s1, s4 ) <NEW_LINE> self.addLink( s2, s5) <NEW_LINE> self.addLink( s3, s6 ) <NEW_LINE> self.addLink( s4, s5 ) <NEW_LINE> self.addLink( s5, s6 ) <NEW_LINE> self.addLink( s4, s7 ) <NEW_LINE> self.addLink( s7, s8 ) <NEW_LINE> self.addLink( s5, s8 ) <NEW_LINE> self.addLink( s8, s9 ) <NEW_LINE> self.addLink( s6, s9 ) <NEW_LINE> self.addLink( s9, Host2 )
Simple topology example.
6259905d097d151d1a2c26ad
class PlotWin(tk.Toplevel): <NEW_LINE> <INDENT> def __init__(self, masterWin, obj, methodNum, year=0): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self._enrollmentObj = obj <NEW_LINE> self._year = year <NEW_LINE> self._fig = plt.figure(figsize=(8,4)) <NEW_LINE> if methodNum == 1: <NEW_LINE> <INDENT> xData = self._enrollmentObj.getYears() <NEW_LINE> yData = (self._enrollmentObj.enrollmentTrend(masterWin) / 1000000) <NEW_LINE> graphTitle = 'Enrollment By Year' <NEW_LINE> xLabel = 'Years' <NEW_LINE> yLabel = 'Amount of Students (Per Million)' <NEW_LINE> plt.plot(xData, yData, '-*r', label = 'Students') <NEW_LINE> plt.legend(loc='best') <NEW_LINE> <DEDENT> elif methodNum == 2: <NEW_LINE> <INDENT> xData = ['<19', '20-24', '25-29', '30-34', '35-39', '40-49', '50+'] <NEW_LINE> yData = self._enrollmentObj.enrollmentByYear(self._year, masterWin) <NEW_LINE> graphTitle = 'Enrollment By Age Groups in ' + str(self._year) <NEW_LINE> xLabel = 'Age Groups' <NEW_LINE> yLabel = 'Amount of Students' <NEW_LINE> plt.bar(xData, yData) <NEW_LINE> <DEDENT> plt.title(graphTitle) <NEW_LINE> plt.xlabel(xLabel) <NEW_LINE> plt.ylabel(yLabel) <NEW_LINE> self._canvas = FigureCanvasTkAgg(self._fig, master=self) <NEW_LINE> self._canvas.get_tk_widget().pack() <NEW_LINE> self._canvas.draw()
Creates a plot window depending on the enrollment method chosen.
6259905d498bea3a75a5911d
class Fedora20GenericJenkinsSlave( FedoraGenericJenkinsSlave, GenericFedora20Box ): <NEW_LINE> <INDENT> pass
A generic Jenkins slave for Fedora 20
6259905d379a373c97d9a664
class BaseTokenLimiter: <NEW_LINE> <INDENT> def __init__(self, resource_name, default_limit): <NEW_LINE> <INDENT> self.resource_name = resource_name <NEW_LINE> self.default_limit = default_limit <NEW_LINE> self._manager = None
Base class for both fungible and non-fungible token limiters. Args: resource_name (str): Name of the resource being rate-limited. default_limit (str): Number of tokens to use if no explicit limit is defined in the limits table.
6259905da8ecb03325872857
class Support(Unit): <NEW_LINE> <INDENT> def __init__(self, **characteristics): <NEW_LINE> <INDENT> self.heal = characteristics['heal'] <NEW_LINE> self.increase_attack = characteristics['increase_attack'] <NEW_LINE> self.increase_defence = characteristics['increase_defence'] <NEW_LINE> super().__init__(**characteristics) <NEW_LINE> <DEDENT> def action(self, ally): <NEW_LINE> <INDENT> ally.defence += self.increase_defence <NEW_LINE> ally.attack += self.increase_attack <NEW_LINE> ally.heal += self.heal <NEW_LINE> <DEDENT> def get_attack(self): <NEW_LINE> <INDENT> return 0
Support unit that boosts an ally's defence, attack, and heal values; its own attack (get_attack) is always 0.
6259905da79ad1619776b5dd
class Fuel(object): <NEW_LINE> <INDENT> CO2emissions = {"gas": .058, "coal": .108, "oil": .081, "other": 0, "none": 0} <NEW_LINE> def __init__(self, type = "none", price = 0, startDate = 0, timeStep = "M", GHGcost = 0, units = "$/MMBtu"): <NEW_LINE> <INDENT> if type in generator.Generator.fuels: <NEW_LINE> <INDENT> self.type = type <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError("Please enter an appropriate fuel type. See Fuel.fuels.") <NEW_LINE> <DEDENT> self.CO2factor = Fuel.CO2emissions[type] <NEW_LINE> if not type == "none": <NEW_LINE> <INDENT> self.data = pd.Series(data = np.array(price)) <NEW_LINE> if timeStep[0] == 'm' or timeStep[0] == 'M': <NEW_LINE> <INDENT> self.data.index = pd.date_range(startDate, periods = len(price), freq = timeStep[0]) + pd.DateOffset(days = 1) + pd.DateOffset(months = -1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.data.index = pd.date_range(startDate, periods = len(price), freq = timeStep[0]) <NEW_LINE> <DEDENT> appendee = pd.Series(self.data[len(self.data)-1], index = pd.date_range(self.data.index[len(self.data) - 1] + 1, periods = 1, freq = self.data.index.freq)) <NEW_LINE> self.data = self.data.append(appendee) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.data = [] <NEW_LINE> <DEDENT> self.GHGcost = GHGcost <NEW_LINE> self.units = units <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.type + " price series of length " + str(len(self.data))
The time series data for a fuel type
6259905d0fa83653e46f6527
class ScipyOptimizerInterface(ExternalOptimizerInterface): <NEW_LINE> <INDENT> _DEFAULT_METHOD = 'L-BFGS-B' <NEW_LINE> def _minimize(self, initial_val, loss_grad_func, equality_funcs, equality_grad_funcs, inequality_funcs, inequality_grad_funcs, step_callback, optimizer_kwargs): <NEW_LINE> <INDENT> def loss_grad_func_wrapper(x): <NEW_LINE> <INDENT> loss, gradient = loss_grad_func(x) <NEW_LINE> return loss, gradient.astype('float64') <NEW_LINE> <DEDENT> method = optimizer_kwargs.pop('method', self._DEFAULT_METHOD) <NEW_LINE> constraints = [] <NEW_LINE> for func, grad_func in zip(equality_funcs, equality_grad_funcs): <NEW_LINE> <INDENT> constraints.append({'type': 'eq', 'fun': func, 'jac': grad_func}) <NEW_LINE> <DEDENT> for func, grad_func in zip(inequality_funcs, inequality_grad_funcs): <NEW_LINE> <INDENT> constraints.append({'type': 'ineq', 'fun': func, 'jac': grad_func}) <NEW_LINE> <DEDENT> minimize_args = [loss_grad_func_wrapper, initial_val] <NEW_LINE> minimize_kwargs = { 'jac': True, 'callback': step_callback, 'method': method, 'constraints': constraints, } <NEW_LINE> minimize_kwargs.update(optimizer_kwargs) <NEW_LINE> if method == 'SLSQP': <NEW_LINE> <INDENT> del minimize_kwargs['callback'] <NEW_LINE> <DEDENT> import scipy.optimize <NEW_LINE> result = scipy.optimize.minimize(*minimize_args, **minimize_kwargs) <NEW_LINE> logging.info('Optimization terminated with:\n' ' Message: %s\n' ' Objective function value: %f\n' ' Number of iterations: %d\n' ' Number of functions evaluations: %d', result.message, result.fun, result.nit, result.nfev) <NEW_LINE> return result['x']
Wrapper allowing `scipy.optimize.minimize` to operate a `tf.Session`. Example: ```python vector = tf.Variable([7., 7.], 'vector') # Make vector norm as small as possible. loss = tf.reduce_sum(tf.square(vector)) optimizer = ScipyOptimizerInterface(loss, options={'maxiter': 100}) with tf.Session() as session: optimizer.minimize(session) # The value of vector should now be [0., 0.]. ``` Example with constraints: ```python vector = tf.Variable([7., 7.], 'vector') # Make vector norm as small as possible. loss = tf.reduce_sum(tf.square(vector)) # Ensure the vector's y component is = 1. equalities = [vector[1] - 1.] # Ensure the vector's x component is >= 1. inequalities = [vector[0] - 1.] # Our default SciPy optimization algorithm, L-BFGS-B, does not support # general constraints. Thus we use SLSQP instead. optimizer = ScipyOptimizerInterface( loss, equalities=equalities, inequalities=inequalities, method='SLSQP') with tf.Session() as session: optimizer.minimize(session) # The value of vector should now be [1., 1.]. ```
6259905dfff4ab517ebcee65
class TestCompareXLSXFiles(ExcelComparisonTest): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.maxDiff = None <NEW_LINE> filename = 'header_image11.xlsx' <NEW_LINE> test_dir = 'xlsxwriter/test/comparison/' <NEW_LINE> self.got_filename = test_dir + '_test_' + filename <NEW_LINE> self.image_dir = test_dir + 'images/' <NEW_LINE> self.exp_filename = test_dir + 'xlsx_files/' + filename <NEW_LINE> self.ignore_files = [] <NEW_LINE> self.ignore_elements = {'xl/worksheets/sheet1.xml': ['<pageMargins', '<pageSetup']} <NEW_LINE> <DEDENT> def test_create_file(self): <NEW_LINE> <INDENT> workbook = Workbook(self.got_filename) <NEW_LINE> worksheet = workbook.add_worksheet() <NEW_LINE> worksheet.set_header('&L&G', {'image_left': self.image_dir + 'black_300.jpg'}) <NEW_LINE> workbook.close() <NEW_LINE> self.assertExcelEqual()
Test file created by XlsxWriter against a file created by Excel.
6259905d435de62698e9d445
class DeviceCmp(DeviceEq): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(DeviceCmp, self).__init__(**kwargs) <NEW_LINE> input_signals_len = max(len(self.first_signals), len(self.second_signals)) <NEW_LINE> functions_eq = self.functions <NEW_LINE> functions_eq_parts = [reduce(And, functions_eq[i:], True) for i in range(1, input_signals_len)] + [True] <NEW_LINE> function_gt = reduce(Or, [ And(Ai, Not(Bi), Xi) for Ai, Bi, Xi in zip(self.first_signals, self.second_signals, functions_eq_parts) ], False) <NEW_LINE> function_lt = reduce(Or, [ And(Not(Ai), Bi, Xi) for Ai, Bi, Xi in zip(self.first_signals, self.second_signals, functions_eq_parts) ], False) <NEW_LINE> self.functions = map(lambda Fi: And(self.strobe_signals_function, Fi), [function_lt, reduce(And, functions_eq, True), function_gt]) <NEW_LINE> self._generate_through_truth_table(signals_list=(self.first_signals, self.second_signals))
Digital comparator device
6259905d07f4c71912bb0a7c
class Monster(Card): <NEW_LINE> <INDENT> def __init__(self, name, desc, level, attack, defense): <NEW_LINE> <INDENT> super().__init__(name, desc) <NEW_LINE> self.level = level <NEW_LINE> self.attack = attack <NEW_LINE> self.defense = defense <NEW_LINE> self.position = None <NEW_LINE> self.turn_count = 0 <NEW_LINE> self.can_change_position = False <NEW_LINE> self.can_attack = False <NEW_LINE> <DEDENT> def full_string(self): <NEW_LINE> <INDENT> return "{name}\n{level}\nMONSTER\n{desc}\n[ATK:{attack}/DEF:{defense}]" .format(name=self.name, level="*" * self.level, desc=self.desc, attack=self.attack, defense=self.defense) <NEW_LINE> <DEDENT> def get_level(self): <NEW_LINE> <INDENT> return self.level <NEW_LINE> <DEDENT> def get_atk(self): <NEW_LINE> <INDENT> return self.attack <NEW_LINE> <DEDENT> def get_def(self): <NEW_LINE> <INDENT> return self.defense <NEW_LINE> <DEDENT> def get_position(self): <NEW_LINE> <INDENT> return self.position <NEW_LINE> <DEDENT> def summon(self, is_set, position): <NEW_LINE> <INDENT> self.is_set = is_set <NEW_LINE> if is_set: <NEW_LINE> <INDENT> self.position = "DEFENSE" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.position = position.upper() <NEW_LINE> self.can_attack = True <NEW_LINE> <DEDENT> <DEDENT> def remove(self): <NEW_LINE> <INDENT> self.position = None <NEW_LINE> self.is_set = False <NEW_LINE> self.can_attack = False <NEW_LINE> self.can_change_position = None <NEW_LINE> <DEDENT> def get_stat(self): <NEW_LINE> <INDENT> if self.position.upper() == "DEFENSE": <NEW_LINE> <INDENT> return self.get_def() <NEW_LINE> <DEDENT> elif self.position.upper() == "ATTACK": <NEW_LINE> <INDENT> return self.get_atk() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def get_can_attack(self): <NEW_LINE> <INDENT> return self.can_attack and self.position == "ATTACK" <NEW_LINE> <DEDENT> def can_change_pos(self): <NEW_LINE> <INDENT> return self.can_change_position <NEW_LINE> <DEDENT> def flip(self): <NEW_LINE> <INDENT> if self.is_set: <NEW_LINE> <INDENT> self.is_set = False <NEW_LINE> <DEDENT> <DEDENT> def change_pos(self): <NEW_LINE> <INDENT> if self.position == "DEFENSE": <NEW_LINE> <INDENT> self.position = "ATTACK" <NEW_LINE> <DEDENT> elif self.position == "ATTACK": <NEW_LINE> <INDENT> self.position = "DEFENSE" <NEW_LINE> <DEDENT> self.flip() <NEW_LINE> self.can_change_position = False <NEW_LINE> <DEDENT> def lock_attack(self): <NEW_LINE> <INDENT> self.can_attack = False <NEW_LINE> <DEDENT> def unlock_attack(self): <NEW_LINE> <INDENT> self.can_attack = True <NEW_LINE> <DEDENT> def lock_position(self): <NEW_LINE> <INDENT> self.can_change_position = False <NEW_LINE> <DEDENT> def unlock_position(self): <NEW_LINE> <INDENT> self.can_change_position = True
This is a class to represent a monster card. Attributes: name (str): The name of the card. desc (str): The description of the card. is_set (bool): Whether the card is Face-down or not. level (int): The level of the monster. attack (int): The attack of the monster. defense (int): The defense of the monster. position (str): The position of the monster. turn_count (int): How many turns the monster has been on the field. can_change_position (bool): If the monster can change positions. can_attack (bool): If the monster can attack.
6259905d24f1403a926863ee
@register_action <NEW_LINE> class DistinctAction(QuerysetAction): <NEW_LINE> <INDENT> name = 'distinct' <NEW_LINE> properties = { "fields": Schema.array(Schema.str) } <NEW_LINE> def handle(self, queryset: QuerySet, fragment: dict): <NEW_LINE> <INDENT> if 'fields' not in fragment: <NEW_LINE> <INDENT> fragment['fields'] = [] <NEW_LINE> <DEDENT> return queryset.distinct(*fragment['fields'])
Action version of a QuerySet.distinct(...) Example: { "action": "distinct" }
6259905d56b00c62f0fb3f0c
class CrossValSplitter(): <NEW_LINE> <INDENT> def __init__(self, *, numel: int, k_folds: int, shuffled: bool = False): <NEW_LINE> <INDENT> inidicies = np.array([i for i in range(numel)]) <NEW_LINE> if shuffled: <NEW_LINE> <INDENT> np.random.shuffle(inidicies) <NEW_LINE> <DEDENT> self.folds = np.array(np.array_split(inidicies, k_folds), dtype=object) <NEW_LINE> self.current_v_ind = -1 <NEW_LINE> self.val = torch.utils.data.sampler.SubsetRandomSampler(self.folds[0]) <NEW_LINE> self.train = torch.utils.data.sampler.SubsetRandomSampler( np.concatenate(self.folds[1:], axis=0) ) <NEW_LINE> self.metrics = {} <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> self.current_v_ind = -1 <NEW_LINE> return self <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.folds) <NEW_LINE> <DEDENT> def __getitem__(self, idx): <NEW_LINE> <INDENT> assert idx >= 0 and idx < len(self) <NEW_LINE> self.val.inidicies = self.folds[idx] <NEW_LINE> self.train.inidicies = np.concatenate( self.folds[np.arange(len(self)) != idx], axis=0 ) <NEW_LINE> <DEDENT> def __next__(self): <NEW_LINE> <INDENT> self.current_v_ind += 1 <NEW_LINE> if self.current_v_ind >= len(self): <NEW_LINE> <INDENT> raise StopIteration <NEW_LINE> <DEDENT> self[self.current_v_ind] <NEW_LINE> <DEDENT> def update_metrics(self, to_post: dict): <NEW_LINE> <INDENT> for k, v in to_post.items(): <NEW_LINE> <INDENT> if k in self.metrics: <NEW_LINE> <INDENT> self.metrics[k].append(v) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.metrics[k] = [v] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def print_metrics(self): <NEW_LINE> <INDENT> for name, samples in self.metrics.items(): <NEW_LINE> <INDENT> xbar = stats.mean(samples) <NEW_LINE> sx = stats.stdev(samples, xbar) <NEW_LINE> tstar = student_t.ppf(1.0 - 0.025, len(samples) - 1) <NEW_LINE> margin_of_error = tstar * sx / sqrt(len(samples)) <NEW_LINE> print("{}: {} +/- {}".format(name, xbar, margin_of_error))
Class that creates cross validation splits. The train and val splits can be used in pytorch DataLoaders. The splits can be updated by calling next(self) or using a loop: for _ in self: .... Parameters --------- numel : int Number of elements in the training set k_folds : int Number of folds shuffled : bool Whether or not to shuffle which data goes in which fold
6259905d10dbd63aa1c7219b
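A standalone sketch of the k-fold index split the class builds in `__init__`, assuming 10 elements and 5 folds and using only NumPy; each fold serves once as the validation set.

```python
import numpy as np

indices = np.arange(10)
folds = np.array_split(indices, 5)
for i, val_idx in enumerate(folds):
    # Training indices are all folds except the current validation fold.
    train_idx = np.concatenate([f for j, f in enumerate(folds) if j != i])
    print(i, val_idx.tolist(), train_idx.tolist())
```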
class Subsystem(ClientObject): <NEW_LINE> <INDENT> def modifyParams(self, command, params, options={}): <NEW_LINE> <INDENT> result = self.obj.modifyParams(command, params, options) <NEW_LINE> if result.status != 0: <NEW_LINE> <INDENT> raise ClientError(result.status, result.text) <NEW_LINE> <DEDENT> return
com.redhat.grid.config:Subsystem
6259905d56ac1b37e6303807
class RawFilter(CachedRawPipe): <NEW_LINE> <INDENT> def __init__(self, source, l_freq=None, h_freq=None, cache=True, **kwargs): <NEW_LINE> <INDENT> CachedRawPipe.__init__(self, source, cache) <NEW_LINE> self.args = (l_freq, h_freq) <NEW_LINE> self.kwargs = kwargs <NEW_LINE> if 'use_kwargs' in kwargs: <NEW_LINE> <INDENT> self._use_kwargs = kwargs.pop('use_kwargs') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._use_kwargs = kwargs <NEW_LINE> <DEDENT> <DEDENT> def as_dict(self, args: Sequence[str] = ()): <NEW_LINE> <INDENT> return CachedRawPipe.as_dict(self, [*args, 'args', 'kwargs']) <NEW_LINE> <DEDENT> def filter_ndvar(self, ndvar): <NEW_LINE> <INDENT> return filter_data(ndvar, *self.args, **self._use_kwargs) <NEW_LINE> <DEDENT> def _make(self, subject, recording): <NEW_LINE> <INDENT> raw = self.source.load(subject, recording, preload=True) <NEW_LINE> self.log.info("Raw %s: filtering for %s/%s...", self.name, subject, recording) <NEW_LINE> raw.filter(*self.args, **self._use_kwargs) <NEW_LINE> return raw
Filter raw pipe Parameters ---------- source : str Name of the raw pipe to use for input data. l_freq : scalar | None Low cut-off frequency in Hz. h_freq : scalar | None High cut-off frequency in Hz. cache : bool Cache the resulting raw files (default ``True``). ... :meth:`mne.io.Raw.filter` parameters. See Also -------- MneExperiment.raw
6259905d21a7993f00c675ae
class AliasType(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'paths': {'key': 'paths', 'type': '[AliasPathType]'}, } <NEW_LINE> def __init__( self, *, name: Optional[str] = None, paths: Optional[List["AliasPathType"]] = None, **kwargs ): <NEW_LINE> <INDENT> super(AliasType, self).__init__(**kwargs) <NEW_LINE> self.name = name <NEW_LINE> self.paths = paths
The alias type. :ivar name: The alias name. :vartype name: str :ivar paths: The paths for an alias. :vartype paths: list[~azure.mgmt.resource.resources.v2016_09_01.models.AliasPathType]
6259905d8e7ae83300eea6cf
class SafeHttpProtocol(eventlet.wsgi.HttpProtocol): <NEW_LINE> <INDENT> def finish(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> eventlet.green.BaseHTTPServer.BaseHTTPRequestHandler.finish(self) <NEW_LINE> <DEDENT> except IOError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> eventlet.greenio.shutdown_safe(self.connection) <NEW_LINE> self.connection.close()
HttpProtocol wrapper to suppress IOErrors. The proxy code above always shuts down client connections, so we catch the IOError that is raised when the SocketServer tries to flush the connection.
6259905da219f33f346c7e47
class HolderTable(Benchmark): <NEW_LINE> <INDENT> def __init__(self, dimensions=2): <NEW_LINE> <INDENT> Benchmark.__init__(self, dimensions) <NEW_LINE> self.bounds = list(zip([-10.0] * self.dimensions, [10.0] * self.dimensions)) <NEW_LINE> self.global_optimum = [(8.055023472141116 , 9.664590028909654), (-8.055023472141116, 9.664590028909654), (8.055023472141116 , -9.664590028909654), (-8.055023472141116, -9.664590028909654)] <NEW_LINE> self.fglob = -19.20850256788675 <NEW_LINE> <DEDENT> def evaluator(self, x, *args): <NEW_LINE> <INDENT> self.fun_evals += 1 <NEW_LINE> return -abs(sin(x[0])*cos(x[1])*exp(abs(1 - sqrt(x[0]**2 + x[1]**2)/pi)))
HolderTable test objective function. This class defines the HolderTable global optimization problem. This is a multimodal minimization problem defined as follows: .. math:: f_{\text{HolderTable}}(\mathbf{x}) = - \left|{e^{\left|{1 - \frac{\sqrt{x_{1}^{2} + x_{2}^{2}}}{\pi} }\right|} \sin\left(x_{1}\right) \cos\left(x_{2}\right)}\right| Here, :math:`n` represents the number of dimensions and :math:`x_i \in [-10, 10]` for :math:`i=1,2`. .. figure:: figures/HolderTable.png :alt: HolderTable function :align: center **Two-dimensional HolderTable function** *Global optimum*: :math:`f(x_i) = -19.20850256788675` for :math:`x_i = \pm 9.664590028909654` for :math:`i=1,2`
6259905dd53ae8145f919aa3
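A quick standalone check of the stated global optimum, evaluating the HolderTable formula directly with the standard math module.

```python
from math import sin, cos, exp, sqrt, pi

def holder_table(x1, x2):
    # f(x) = -|sin(x1) * cos(x2) * exp(|1 - sqrt(x1^2 + x2^2) / pi|)|
    return -abs(sin(x1) * cos(x2) * exp(abs(1.0 - sqrt(x1 ** 2 + x2 ** 2) / pi)))

print(holder_table(8.055023472141116, 9.664590028909654))   # ~ -19.20850256788675
```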
@store.command(name="animate/stop") <NEW_LINE> class StopAnimateCommand(store.Command): <NEW_LINE> <INDENT> animations = store.injected("animations") <NEW_LINE> animation_id = dictobj.Field(sb.string_spec, wrapper=sb.required) <NEW_LINE> async def execute(self): <NEW_LINE> <INDENT> self.animations.stop(self.animation_id) <NEW_LINE> return {"success": True}
Stop a tile animation
6259905d009cb60464d02b78
class LeCunNormal(WeightInitializer): <NEW_LINE> <INDENT> def weights(self, shape): <NEW_LINE> <INDENT> fan_in, fan_out = self.compute_fans(shape) <NEW_LINE> scale = np.sqrt(1. / fan_in) <NEW_LINE> return np.random.normal(low = -scale, high = scale, size = shape) <NEW_LINE> <DEDENT> @property <NEW_LINE> def init_name(self): <NEW_LINE> <INDENT> return self.__class__.__name__
**LeCun Normal (LeCunNormal)** Weights should be randomly chosen but in such a way that the sigmoid is primarily activated in its linear region. LeCun normal is the implementation based on a Gaussian distribution. References: [1] Efficient Backprop [LeCun, 1998][PDF] http://yann.lecun.com/exdb/publis/pdf/lecun-98b.pdf
6259905d1f037a2d8b9e538c
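A minimal standalone sketch of LeCun-normal initialization as the docstring describes it, a zero-mean Gaussian with standard deviation sqrt(1/fan_in), assuming NumPy; here fan_in is passed in directly rather than computed from the shape.

```python
import numpy as np

def lecun_normal(shape, fan_in):
    # Zero-mean Gaussian with standard deviation sqrt(1 / fan_in).
    std = np.sqrt(1.0 / fan_in)
    return np.random.normal(loc=0.0, scale=std, size=shape)

weights = lecun_normal((64, 128), fan_in=128)
print(weights.std())   # roughly sqrt(1/128) ~= 0.088
```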
class IMacroTaskStartDate(IStartDate): <NEW_LINE> <INDENT> pass
Adapts a TaskContainer into the start date used to compute the macro task due date.
6259905d0fa83653e46f6529
class SimpleLatticeTestHarness(TestHarness): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(SimpleLatticeTestHarness, self).__init__() <NEW_LINE> self.input_set = SimpleLatticeInput(num_dimensions=3) <NEW_LINE> self.num_polar = 4 <NEW_LINE> self.azim_spacing = 0.12 <NEW_LINE> self.z_spacing = 0.14 <NEW_LINE> self.tolerance = 1E-4 <NEW_LINE> <DEDENT> def _create_geometry(self): <NEW_LINE> <INDENT> super(SimpleLatticeTestHarness, self)._create_geometry() <NEW_LINE> self.input_set.geometry.useSymmetry(True, True, True) <NEW_LINE> cmfd = openmoc.Cmfd() <NEW_LINE> cmfd.setLatticeStructure(2,2,2) <NEW_LINE> cmfd.setGroupStructure([[1,2,3], [4,5,6,7]]) <NEW_LINE> cmfd.setKNearest(3) <NEW_LINE> self.input_set.geometry.setCmfd(cmfd) <NEW_LINE> <DEDENT> def _create_trackgenerator(self): <NEW_LINE> <INDENT> geometry = self.input_set.geometry <NEW_LINE> geometry.initializeFlatSourceRegions() <NEW_LINE> self.track_generator = openmoc.TrackGenerator3D(geometry, self.num_azim, self.num_polar, self.azim_spacing, self.z_spacing) <NEW_LINE> self.track_generator.setSegmentFormation(openmoc.OTF_STACKS) <NEW_LINE> <DEDENT> def _generate_tracks(self): <NEW_LINE> <INDENT> self.track_generator.setNumThreads(self.num_threads) <NEW_LINE> self.track_generator.generateTracks() <NEW_LINE> <DEDENT> def _create_solver(self): <NEW_LINE> <INDENT> self.solver = openmoc.CPULSSolver(self.track_generator) <NEW_LINE> self.solver.setNumThreads(self.num_threads) <NEW_LINE> self.solver.setConvergenceThreshold(self.tolerance) <NEW_LINE> <DEDENT> def _get_results(self, num_iters=True, keff=True, fluxes=False, num_fsrs=True, num_tracks=False, num_segments=False, hash_output=False): <NEW_LINE> <INDENT> return super(SimpleLatticeTestHarness, self)._get_results( num_iters=num_iters, keff=keff, fluxes=fluxes, num_fsrs=num_fsrs, num_tracks=num_tracks, num_segments=num_segments, hash_output=hash_output)
An eigenvalue calculation in a 3D lattice with 7-group C5G7 data.
6259905d379a373c97d9a666
class CheckACLTerms(unittest.TestCase): <NEW_LINE> <INDENT> def testEmptyAnonymousTerms(self): <NEW_LINE> <INDENT> a = acl.ACL() <NEW_LINE> for i in range(5): <NEW_LINE> <INDENT> a.terms.append(acl.Term()) <NEW_LINE> self.assertEqual(a.terms[i].name, None) <NEW_LINE> <DEDENT> self.assertEqual(len(a.terms), 5) <NEW_LINE> <DEDENT> def testEmptyNamedTerms(self): <NEW_LINE> <INDENT> a = acl.ACL() <NEW_LINE> for i in range(5): <NEW_LINE> <INDENT> name = 'term' + str(i) <NEW_LINE> a.terms.append(acl.Term(name)) <NEW_LINE> self.assertEqual(a.terms[i].name, name) <NEW_LINE> <DEDENT> self.assertEqual(len(a.terms), 5)
Test insertion of Term objects into an ACL object
6259905d435de62698e9d447
class RegexTokenizer(DDFSketch): <NEW_LINE> <INDENT> def __init__(self, pattern=r'\s+', min_token_length=2, to_lowercase=True): <NEW_LINE> <INDENT> super(RegexTokenizer, self).__init__() <NEW_LINE> self.settings = dict() <NEW_LINE> self.settings['min_token_length'] = min_token_length <NEW_LINE> self.settings['to_lowercase'] = to_lowercase <NEW_LINE> self.settings['pattern'] = pattern <NEW_LINE> self.input_col = None <NEW_LINE> self.output_col = None <NEW_LINE> self.name = self.__class__.__name__ <NEW_LINE> self.phi_category = OPTGroup.OPT_SERIAL <NEW_LINE> self.tag = self.name <NEW_LINE> <DEDENT> def transform(self, data, input_col, output_col=None): <NEW_LINE> <INDENT> if isinstance(input_col, list): <NEW_LINE> <INDENT> raise Exception('`input_col` must be a single column') <NEW_LINE> <DEDENT> self.input_col = input_col <NEW_LINE> if not output_col: <NEW_LINE> <INDENT> output_col = "{}_token".format(self.input_col) <NEW_LINE> <DEDENT> self.output_col = output_col <NEW_LINE> self.settings = self.__dict__.copy() <NEW_LINE> uuid_key = ContextBase .ddf_add_task(operation=self, parent=[data.last_uuid]) <NEW_LINE> return DDF(last_uuid=uuid_key) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def function(df, params): <NEW_LINE> <INDENT> return _tokenizer_(df, params)
A regex-based tokenizer that extracts tokens by using the provided regex pattern (in Java dialect) to split the text. :Example: >>> ddf2 = RegexTokenizer(pattern=r"(?u)\w\w+") ... .transform(ddf_input, input_col='col_0')
6259905d29b78933be26abe5
class EdgeResizingHandle(ResizingHandle): <NEW_LINE> <INDENT> def __init__(self, parent): <NEW_LINE> <INDENT> super(EdgeResizingHandle, self).__init__(parent) <NEW_LINE> self.setPen(self.parentResizable.getEdgeResizingHandleHiddenPen()) <NEW_LINE> <DEDENT> def hoverEnterEvent(self, event): <NEW_LINE> <INDENT> super(EdgeResizingHandle, self).hoverEnterEvent(event) <NEW_LINE> self.setPen(self.parentResizable.getEdgeResizingHandleHoverPen()) <NEW_LINE> <DEDENT> def hoverLeaveEvent(self, event): <NEW_LINE> <INDENT> self.setPen(self.parentResizable.getEdgeResizingHandleHiddenPen()) <NEW_LINE> super(EdgeResizingHandle, self).hoverLeaveEvent(event)
Resizing handle positioned on one of the 4 edges
6259905d0c0af96317c57880
class TestInvoiceTable(TableTest): <NEW_LINE> <INDENT> table = model.billing.tables.invoice <NEW_LINE> samples = [dict(person_id=1, issue_date=datetime.datetime(2006, 11, 21), ), dict(person_id=2, issue_date=datetime.datetime(2006, 11, 22), ), ] <NEW_LINE> not_nullables = ['person_id']
Test the ``invoice`` table.
6259905d32920d7e50bc7689
class TrackLoader(object): <NEW_LINE> <INDENT> write_batch_size = 1000 <NEW_LINE> def __init__(self, session, tags, id_cache): <NEW_LINE> <INDENT> self._session = session <NEW_LINE> self._tags = tags <NEW_LINE> self._id_cache = id_cache <NEW_LINE> <DEDENT> def insert(self, cls, id_gen): <NEW_LINE> <INDENT> for batch in avalon.util.partition(self._tags, self.write_batch_size): <NEW_LINE> <INDENT> self._insert_batch(cls, id_gen, batch) <NEW_LINE> <DEDENT> <DEDENT> def _insert_batch(self, cls, id_gen, batch): <NEW_LINE> <INDENT> queued = [self._get_new_obj(cls, id_gen, tag) for tag in batch] <NEW_LINE> self._session.add_all(queued) <NEW_LINE> _flush_session(self._session) <NEW_LINE> <DEDENT> def _get_new_obj(self, cls, id_gen, tag): <NEW_LINE> <INDENT> obj = cls() <NEW_LINE> obj.id = id_gen(tag.path) <NEW_LINE> obj.name = tag.title <NEW_LINE> obj.length = tag.length <NEW_LINE> obj.track = tag.track <NEW_LINE> obj.year = tag.year <NEW_LINE> obj.album_id = self._id_cache.get_album_id(tag.album) <NEW_LINE> obj.artist_id = self._id_cache.get_artist_id(tag.artist) <NEW_LINE> obj.genre_id = self._id_cache.get_genre_id(tag.genre) <NEW_LINE> return obj
Create and insert entries for each tag and associated IDs. :cvar int write_batch_size: How many tracks to insert into a session at a time (between calls to flush the session).
6259905d7b25080760ed8801
class MelTopicData(MelGroups): <NEW_LINE> <INDENT> def __init__(self, attr): <NEW_LINE> <INDENT> MelGroups.__init__(self, attr, MelUnion({ 0: MelStruct(b'PDTO', [u'2I'], u'data_type', (FID, u'topic_ref')), 1: MelStruct(b'PDTO', [u'I', u'4s'], u'data_type', u'topic_subtype'), }, decider=PartialLoadDecider( loader=MelUInt32(b'PDTO', u'data_type'), decider=AttrValDecider(u'data_type'))), )
Occurs twice in PACK, so moved here to deduplicate the definition a bit. Can't be placed inside MrePack, since one of its own subclasses depends on this.
6259905df548e778e596cbcc
class VosiCapabilityRenderer(BaseRenderer): <NEW_LINE> <INDENT> charset = 'utf-8' <NEW_LINE> ns_vosicap = 'http://www.ivoa.net/xml/VOSICapabilities/v1.0' <NEW_LINE> ns_vs = 'http://www.ivoa.net/xml/VODataService/v1.1' <NEW_LINE> ns_xsi = "http://www.w3.org/2001/XMLSchema-instance" <NEW_LINE> ns_vr = "http://www.ivoa.net/xml/VOResource/v1.0" <NEW_LINE> version = '1.1' <NEW_LINE> comment = "<!--\n" + " ! Generated using Django with SimplerXMLGenerator\n" + " ! at "+str(timezone.now())+"\n" + " !-->\n" <NEW_LINE> def render(self, capabilities, prettyprint=False): <NEW_LINE> <INDENT> stream = StringIO() <NEW_LINE> xml = SimplerXMLGenerator(stream, self.charset) <NEW_LINE> xml.startDocument() <NEW_LINE> xml._write(self.comment) <NEW_LINE> nsattrs = {} <NEW_LINE> nsattrs['version'] = self.version <NEW_LINE> nsattrs['xmlns:vosi'] = self.ns_vosicap <NEW_LINE> nsattrs['xmlns:xsi'] = self.ns_xsi <NEW_LINE> nsattrs['xmlns:vs'] = self.ns_vs <NEW_LINE> nsattrs['xmlns:vr'] = self.ns_vr <NEW_LINE> xml.startElement('vosi:capabilities', nsattrs) <NEW_LINE> for capability in capabilities: <NEW_LINE> <INDENT> attrs = {} <NEW_LINE> if capability.standardID: <NEW_LINE> <INDENT> attrs['standardID'] = str(capability.standardID) <NEW_LINE> <DEDENT> xml.startElement('capability', attrs) <NEW_LINE> interfaces = capability.voresource_interface_set.all() <NEW_LINE> for interface in interfaces: <NEW_LINE> <INDENT> attrs = {} <NEW_LINE> if interface.type: <NEW_LINE> <INDENT> attrs['xsi:type'] = interface.type <NEW_LINE> <DEDENT> xml.startElement('interface', attrs) <NEW_LINE> accessurls = interface.voresource_accessurl_set.all() <NEW_LINE> for accessurl in accessurls: <NEW_LINE> <INDENT> attrs = {} <NEW_LINE> attrs['use'] = accessurl.use <NEW_LINE> xml.startElement('accessURL', attrs) <NEW_LINE> xml.characters(smart_unicode(accessurl.url)) <NEW_LINE> xml.endElement('accessURL') <NEW_LINE> <DEDENT> xml.endElement('interface') <NEW_LINE> <DEDENT> xml.endElement('capability') <NEW_LINE> <DEDENT> xml.endElement('vosi:capabilities') <NEW_LINE> xml.endDocument() <NEW_LINE> xml_string = stream.getvalue() <NEW_LINE> if prettyprint is True: <NEW_LINE> <INDENT> parsed = etree.fromstring(xml_string) <NEW_LINE> pretty_xml = etree.tostring(parsed, pretty_print=True) <NEW_LINE> xml_string = pretty_xml <NEW_LINE> <DEDENT> return xml_string
Takes capability data (from a queryset) and returns an XML stream for the VOSI capabilities/ endpoint.
6259905da17c0f6771d5d6c4
class CategorySerializer2(serializers.ModelSerializer): <NEW_LINE> <INDENT> sub_cat = CategorySerializer3(many=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = GoodsCategory <NEW_LINE> fields = "__all__"
Product category serialization.
6259905d7047854f46340a03
class AnnotationTestCase(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(AnnotationTestCase, self).setUp() <NEW_LINE> self.client = APIClient() <NEW_LINE> self.index_create_url = reverse("annotations-list") <NEW_LINE> self.annotation = { "annotator_schema_version": "v1.0", "text": "A note I wrote", "quote": "the text that was annotated", "uri": "http://example.com", "ranges": [ { "start": "/p[69]/span/span", "end": "/p[70]/span/span", "startOffset": 0, "endOffset": 120 } ] } <NEW_LINE> <DEDENT> def create_annotation(self, annotation=None): <NEW_LINE> <INDENT> return self.client.post(self.index_create_url, data=json.dumps(annotation or self.annotation), content_type="application/json")
Base class with a few utility methods. The `documentation <http://docs.annotatorjs.org/en/v1.2.x/storage.html>`_ forms the basis for many of the tests.
6259905d76e4537e8c3f0bd0
class PolyErrorMessage(PolyMessage): <NEW_LINE> <INDENT> def __init__(self, message_code, file_name, line, start_pos, end_pos, text): <NEW_LINE> <INDENT> self.message_code = message_code <NEW_LINE> self.location = PolyLocation(file_name, line, start_pos, end_pos) <NEW_LINE> self.text = text
A message detailing an error (or warning) from Poly/ML
6259905d10dbd63aa1c7219c
class LogParametersCountHook(tf.train.SessionRunHook): <NEW_LINE> <INDENT> def begin(self): <NEW_LINE> <INDENT> tf.logging.info("Number of trainable parameters: %d", misc.count_parameters())
Simple hook that logs the number of trainable parameters.
6259905d30dc7b76659a0da2
class Student(User): <NEW_LINE> <INDENT> student_registration = models.IntegerField()
Student user with a registration number.
6259905d8e7ae83300eea6d1
class TestProjectRoleView(ProjectMixin, RoleAssignmentMixin, TestViewsBase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super().setUp() <NEW_LINE> self.project = self._make_project( 'TestProject', PROJECT_TYPE_PROJECT, None ) <NEW_LINE> self.owner_as = self._make_assignment( self.project, self.user, self.role_owner ) <NEW_LINE> self.user_delegate = self.make_user('delegate') <NEW_LINE> self.delegate_as = self._make_assignment( self.project, self.user_delegate, self.role_delegate ) <NEW_LINE> self.user_new = self.make_user('guest') <NEW_LINE> self.guest_as = self._make_assignment( self.project, self.user_new, self.role_guest ) <NEW_LINE> <DEDENT> def test_render(self): <NEW_LINE> <INDENT> with self.login(self.user): <NEW_LINE> <INDENT> response = self.client.get( reverse( 'projectroles:roles', kwargs={'project': self.project.sodar_uuid}, ) ) <NEW_LINE> <DEDENT> self.assertEqual(response.status_code, 200) <NEW_LINE> self.assertEqual(response.context['project'].pk, self.project.pk) <NEW_LINE> expected = { 'id': self.owner_as.pk, 'project': self.project.pk, 'role': self.role_owner.pk, 'user': self.user.pk, 'sodar_uuid': self.owner_as.sodar_uuid, } <NEW_LINE> self.assertEqual(model_to_dict(response.context['owner']), expected) <NEW_LINE> expected = { 'id': self.delegate_as.pk, 'project': self.project.pk, 'role': self.role_delegate.pk, 'user': self.user_delegate.pk, 'sodar_uuid': self.delegate_as.sodar_uuid, } <NEW_LINE> self.assertEqual(model_to_dict(response.context['delegate']), expected) <NEW_LINE> expected = { 'id': self.guest_as.pk, 'project': self.project.pk, 'role': self.role_guest.pk, 'user': self.user_new.pk, 'sodar_uuid': self.guest_as.sodar_uuid, } <NEW_LINE> self.assertEqual( model_to_dict(response.context['members'][0]), expected ) <NEW_LINE> <DEDENT> def test_render_not_found(self): <NEW_LINE> <INDENT> with self.login(self.user): <NEW_LINE> <INDENT> response = self.client.get( reverse( 'projectroles:roles', kwargs={'project': INVALID_UUID}, ) ) <NEW_LINE> <DEDENT> self.assertEqual(response.status_code, 404)
Tests for project roles view
6259905dd268445f2663a67e
class Client(object): <NEW_LINE> <INDENT> def __init__(self, api_base_url, auth): <NEW_LINE> <INDENT> self._api_base_url = api_base_url <NEW_LINE> self._auth = auth <NEW_LINE> <DEDENT> def trigger_dag(self, dag_id, run_id=None, conf=None, execution_date=None): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def get_pool(self, name): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def get_pools(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def create_pool(self, name, slots, description): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def delete_pool(self, name): <NEW_LINE> <INDENT> raise NotImplementedError()
Base API client for all API clients.
6259905d4a966d76dd5f0536
class MonitorBiAccessAnalysis(BaseModel): <NEW_LINE> <INDENT> access_source = models.IntegerField(default=0) <NEW_LINE> table_content = models.CharField(max_length=5000, default='') <NEW_LINE> class Meta: <NEW_LINE> <INDENT> db_table = 'monitor_bi_access_analysis'
Log statistics for BI access via the web page/API; the statistics are obtained by analysing the bi_permission_logs (web) and bi_permission_api_log (api) tables in the bi database.
6259905d097d151d1a2c26b2
class MockElasticCluster(object): <NEW_LINE> <INDENT> def stats(self, node_id=None): <NEW_LINE> <INDENT> raise TransportError("custom error", 123)
Mock of Elasticsearch ClusterClient
6259905d435de62698e9d449
class _TileTranslateTask: <NEW_LINE> <INDENT> def __init__(self, src, targ, dx, dy): <NEW_LINE> <INDENT> self._src = src <NEW_LINE> self._targ = targ <NEW_LINE> self._dx = int(dx) <NEW_LINE> self._dy = int(dy) <NEW_LINE> self._slices_x = tiledsurface.calc_translation_slices(self._dx) <NEW_LINE> self._slices_y = tiledsurface.calc_translation_slices(self._dy) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<{name} dx={dx} dy={dy}>".format( name = self.__class__.__name__, dx = self._dx, dy = self._dy, ) <NEW_LINE> <DEDENT> def __call__(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> (src_tx, src_ty), src_tile = self._src.popitem() <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> src = src_tile.to_array() <NEW_LINE> slices_x = self._slices_x <NEW_LINE> slices_y = self._slices_y <NEW_LINE> is_integral = len(slices_x) == 1 and len(slices_y) == 1 <NEW_LINE> for (src_x0, src_x1), (targ_tdx, targ_x0, targ_x1) in slices_x: <NEW_LINE> <INDENT> for (src_y0, src_y1), (targ_tdy, targ_y0, targ_y1) in slices_y: <NEW_LINE> <INDENT> targ_tx = src_tx + targ_tdx <NEW_LINE> targ_ty = src_ty + targ_tdy <NEW_LINE> if is_integral: <NEW_LINE> <INDENT> self._targ[targ_tx, targ_ty] = src <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> targ = self._targ.get((targ_tx, targ_ty), None) <NEW_LINE> if targ is None: <NEW_LINE> <INDENT> targ = np.zeros((N, N), 'uint8') <NEW_LINE> self._targ[targ_tx, targ_ty] = targ <NEW_LINE> <DEDENT> targ[targ_y0:targ_y1, targ_x0:targ_x1] = src[src_y0:src_y1, src_x0:src_x1] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return bool(self._src)
Translate/move tiles (compressed strokemap -> uncompressed tmp) Calling this task is destructive to the source strokemap, so it must be paired with a _TileRecompressTask queued up to fire when it has completely finished. Tiles are translated by slicing and recombining, so this task must be called to completion before the output tiledict will be ready for recompression.
6259905d3eb6a72ae038bca3
class Gas(BinarySensor): <NEW_LINE> <INDENT> def __init__(self, block, position): <NEW_LINE> <INDENT> super(Gas, self).__init__( block, position, 'gas', 'gas_sensor/alarm_state')
Class to represent a Gas sensor
6259905d379a373c97d9a669
class AzureFirewallFqdnTagsOperations(object): <NEW_LINE> <INDENT> models = _models <NEW_LINE> def __init__(self, client, config, serializer, deserializer): <NEW_LINE> <INDENT> self._client = client <NEW_LINE> self._serialize = serializer <NEW_LINE> self._deserialize = deserializer <NEW_LINE> self._config = config <NEW_LINE> <DEDENT> def list_all( self, **kwargs ): <NEW_LINE> <INDENT> cls = kwargs.pop('cls', None) <NEW_LINE> error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } <NEW_LINE> error_map.update(kwargs.pop('error_map', {})) <NEW_LINE> api_version = "2021-05-01" <NEW_LINE> accept = "application/json" <NEW_LINE> def prepare_request(next_link=None): <NEW_LINE> <INDENT> header_parameters = {} <NEW_LINE> header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') <NEW_LINE> if not next_link: <NEW_LINE> <INDENT> url = self.list_all.metadata['url'] <NEW_LINE> path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } <NEW_LINE> url = self._client.format_url(url, **path_format_arguments) <NEW_LINE> query_parameters = {} <NEW_LINE> query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') <NEW_LINE> request = self._client.get(url, query_parameters, header_parameters) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> url = next_link <NEW_LINE> query_parameters = {} <NEW_LINE> request = self._client.get(url, query_parameters, header_parameters) <NEW_LINE> <DEDENT> return request <NEW_LINE> <DEDENT> def extract_data(pipeline_response): <NEW_LINE> <INDENT> deserialized = self._deserialize('AzureFirewallFqdnTagListResult', pipeline_response) <NEW_LINE> list_of_elem = deserialized.value <NEW_LINE> if cls: <NEW_LINE> <INDENT> list_of_elem = cls(list_of_elem) <NEW_LINE> <DEDENT> return deserialized.next_link or None, iter(list_of_elem) <NEW_LINE> <DEDENT> def get_next(next_link=None): <NEW_LINE> <INDENT> request = prepare_request(next_link) <NEW_LINE> pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) <NEW_LINE> response = pipeline_response.http_response <NEW_LINE> if response.status_code not in [200]: <NEW_LINE> <INDENT> map_error(status_code=response.status_code, response=response, error_map=error_map) <NEW_LINE> raise HttpResponseError(response=response, error_format=ARMErrorFormat) <NEW_LINE> <DEDENT> return pipeline_response <NEW_LINE> <DEDENT> return ItemPaged( get_next, extract_data ) <NEW_LINE> <DEDENT> list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/azureFirewallFqdnTags'}
AzureFirewallFqdnTagsOperations operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~azure.mgmt.network.v2021_05_01.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer.
6259905d7b25080760ed8802
class Venue(Media): <NEW_LINE> <INDENT> def __init__(self, location, title, address, foursquare_id=None): <NEW_LINE> <INDENT> super(Venue, self).__init__() <NEW_LINE> from pytgbot.api_types.receivable.media import Location <NEW_LINE> assert(location is not None) <NEW_LINE> assert(isinstance(location, Location)) <NEW_LINE> self.location = location <NEW_LINE> assert(title is not None) <NEW_LINE> assert(isinstance(title, str)) <NEW_LINE> self.title = title <NEW_LINE> assert(address is not None) <NEW_LINE> assert(isinstance(address, str)) <NEW_LINE> self.address = address <NEW_LINE> assert(foursquare_id is None or isinstance(foursquare_id, str)) <NEW_LINE> self.foursquare_id = foursquare_id <NEW_LINE> <DEDENT> def to_array(self): <NEW_LINE> <INDENT> array = super(Venue, self).to_array() <NEW_LINE> array['location'] = self.location.to_array() <NEW_LINE> array['title'] = str(self.title) <NEW_LINE> array['address'] = str(self.address) <NEW_LINE> if self.foursquare_id is not None: <NEW_LINE> <INDENT> array['foursquare_id'] = str(self.foursquare_id) <NEW_LINE> <DEDENT> return array <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def from_array(array): <NEW_LINE> <INDENT> if array is None or not array: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> assert(isinstance(array, dict)) <NEW_LINE> from pytgbot.api_types.receivable.media import Location <NEW_LINE> data = {} <NEW_LINE> data['location'] = Location.from_array(array.get('location')) <NEW_LINE> data['title'] = str(array.get('title')) <NEW_LINE> data['address'] = str(array.get('address')) <NEW_LINE> data['foursquare_id'] = str(array.get('foursquare_id')) if array.get('foursquare_id') is not None else None <NEW_LINE> return Venue(**data) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "Venue(location={self.location!r}, title={self.title!r}, address={self.address!r}, foursquare_id={self.foursquare_id!r})".format(self=self) <NEW_LINE> <DEDENT> def __contains__(self, key): <NEW_LINE> <INDENT> return key in ["location", "title", "address", "foursquare_id"]
This object represents a venue. https://core.telegram.org/bots/api#venue
6259905d99cbb53fe6832525
class Controller: <NEW_LINE> <INDENT> def __init__(self, engine): <NEW_LINE> <INDENT> self.engine = engine <NEW_LINE> self.max_video_number = 20 <NEW_LINE> self.running_vid = 0 <NEW_LINE> <DEDENT> def play(self, file, speed, vid_number): <NEW_LINE> <INDENT> self.engine.play(file, speed) <NEW_LINE> self.running_vid = vid_number
freemix controller class. Acts as an intermediary between the interfaces and the GStreamer engine.
6259905d6e29344779b01c93
class ImageList(generics.ListCreateAPIView): <NEW_LINE> <INDENT> permission_classes = (IsAdminOrReadOnly,) <NEW_LINE> queryset = Image.objects.all().order_by('id') <NEW_LINE> serializer_class = ImageSerializer
List all images, or create a new image.
6259905d3617ad0b5ee07791
class RDFFindSpec(rdfvalue.FindSpec): <NEW_LINE> <INDENT> pass
Clients prior to 2.9.1.0 used this name for this protobuf. We need to understand it on the server as well if a response from an old client comes in so we define an alias here.
6259905d442bda511e95d87c
class FunctionTypedSingle(FunctionSingle, FunctionTyped): <NEW_LINE> <INDENT> pass
A Postgresql function that returns a single row having a single (typically composite) column
6259905d45492302aabfdb1e
@typing.final <NEW_LINE> class Vendor(int, Enum): <NEW_LINE> <INDENT> ZAVALA = 69482069 <NEW_LINE> XUR = 2190858386 <NEW_LINE> BANSHE = 672118013 <NEW_LINE> SPIDER = 863940356 <NEW_LINE> SHAXX = 3603221665 <NEW_LINE> KADI = 529635856 <NEW_LINE> YUNA = 1796504621 <NEW_LINE> EVERVERSE = 3361454721 <NEW_LINE> AMANDA = 460529231 <NEW_LINE> CROW = 3611983588 <NEW_LINE> HAWTHORNE = 3347378076 <NEW_LINE> ADA1 = 350061650 <NEW_LINE> DRIFTER = 248695599 <NEW_LINE> IKORA = 1976548992 <NEW_LINE> SAINT = 765357505 <NEW_LINE> ERIS_MORN = 1616085565 <NEW_LINE> SHAW_HAWN = 1816541247 <NEW_LINE> VARIKS = 2531198101
An Enum for all available vendors in Destiny 2.
6259905d7d847024c075da17
class Environment: <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self._state_cache = {} <NEW_LINE> self._observation_cache = {} <NEW_LINE> self._action_cache = {} <NEW_LINE> super().__init__(**kwargs) <NEW_LINE> <DEDENT> def get_state(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def _cache_item(self, cache, key, callback): <NEW_LINE> <INDENT> if key not in cache: <NEW_LINE> <INDENT> cache[key] = callback() <NEW_LINE> <DEDENT> return cache[key] <NEW_LINE> <DEDENT> def _cache_state(self, key, callback): <NEW_LINE> <INDENT> return self._cache_item(self._state_cache, key, callback) <NEW_LINE> <DEDENT> def _cache_observation(self, key, callback): <NEW_LINE> <INDENT> return self._cache_item(self._observation_cache, key, callback) <NEW_LINE> <DEDENT> def _cache_action(self, key, callback): <NEW_LINE> <INDENT> return self._cache_item(self._action_cache, key, callback) <NEW_LINE> <DEDENT> def get_observation(self): <NEW_LINE> <INDENT> return self.get_state() <NEW_LINE> <DEDENT> def get_actions(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def end_of_episode(self): <NEW_LINE> <INDENT> return self.get_actions() == [] <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def start_new_episode(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def react(self, action): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def visualize(self): <NEW_LINE> <INDENT> raise NotImplementedError()
A reinforcement learning environment.
6259905d627d3e7fe0e084d0
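A hypothetical minimal subclass sketch showing how the abstract interface above is meant to be filled in, assuming the Environment class is available; the counting environment and its single action are invented for illustration.

```python
class CountingEnvironment(Environment):
    # Toy environment: count from 0 to 3, then the episode ends.
    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.value = 0

    def get_state(self):
        return self.value

    def get_actions(self):
        return ["increment"] if self.value < 3 else []

    def reset(self):
        self.value = 0

    def start_new_episode(self):
        self.reset()

    def react(self, action):
        self.value += 1
        return self.get_observation()

env = CountingEnvironment()
env.start_new_episode()
while not env.end_of_episode():
    env.react("increment")
print(env.get_state())   # 3
```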
class UriSchemes: <NEW_LINE> <INDENT> FTP = "ftp" <NEW_LINE> HTTP = "http" <NEW_LINE> HTTPS = "https" <NEW_LINE> IMAP = "imap" <NEW_LINE> MAILTO = "mailto" <NEW_LINE> SFTP = "sftp" <NEW_LINE> SMS = "sms" <NEW_LINE> SSH = "ssh" <NEW_LINE> TEL = "tel" <NEW_LINE> TELNET = "telnet"
Common URI schemes. See https://en.wikipedia.org/wiki/List_of_URI_schemes.
6259905de64d504609df9ef1
class ProviderOf(object): <NEW_LINE> <INDENT> def __init__(self, interface): <NEW_LINE> <INDENT> self.interface = interface
Can be used to get a :class:`BoundProvider` of an interface, for example: >>> def provide_int(): ... print('providing') ... return 123 >>> >>> def configure(binder): ... binder.bind(int, to=provide_int) >>> >>> injector = Injector(configure) >>> provider = injector.get(ProviderOf(int)) >>> type(provider) <class 'injector.BoundProvider'> >>> value = provider.get() providing >>> value 123
6259905d16aa5153ce401b29
class ODSTableSet(object): <NEW_LINE> <INDENT> def __init__(self, fileobj, window=None, **kw): <NEW_LINE> <INDENT> if hasattr(fileobj, "read"): <NEW_LINE> <INDENT> fileobj = io.BytesIO(fileobj.read()) <NEW_LINE> <DEDENT> self.window = window <NEW_LINE> zf = zipfile.ZipFile(fileobj).open("content.xml") <NEW_LINE> self.content = zf.read() <NEW_LINE> zf.close() <NEW_LINE> self._table_matcher = ODS_TABLE_MATCH <NEW_LINE> self._document_close_tag = ODS_DOCUMENT_CLOSE_TAG <NEW_LINE> self._namespace_tag_matcher = ODS_NAMESPACES_TAG_MATCH <NEW_LINE> self._row_set_cls = ODSRowSet <NEW_LINE> <DEDENT> def make_tables(self): <NEW_LINE> <INDENT> namespace_tags = self._get_namespace_tags() <NEW_LINE> sheets = [ m.groups(0)[0] for m in self._table_matcher.finditer(self.content) ] <NEW_LINE> return [ self._row_set_cls(sheet, self.window, namespace_tags) for sheet in sheets ] <NEW_LINE> <DEDENT> def _get_namespace_tags(self): <NEW_LINE> <INDENT> match = re.search(self._namespace_tag_matcher, self.content) <NEW_LINE> assert match <NEW_LINE> tag_open = match.groups()[0] <NEW_LINE> tag_close = self._document_close_tag <NEW_LINE> return tag_open, tag_close
A wrapper around ODS files. Because they are zipped and the info we want is in the zipped file as content.xml, we must ensure that we either have a seekable object (local file) or that we retrieve all of the content from the remote URL.
6259905d38b623060ffaa372
class ResourceResolverOperations(NamedTuple): <NEW_LINE> <INDENT> parent_urn: Optional[str] <NEW_LINE> serialized_props: Dict[str, Any] <NEW_LINE> dependencies: Set[str] <NEW_LINE> provider_ref: Optional[str]
The set of properties resulting from a successful call to prepare_resource.
6259905d097d151d1a2c26b4
class StrList(Type, click.ParamType): <NEW_LINE> <INDENT> name = "comma_sep_str_list" <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.allowed_values = None <NEW_LINE> <DEDENT> def __or__(self, other) -> t.Union[Either, 'StrList']: <NEW_LINE> <INDENT> if isinstance(other, Exact) and isinstance(other.exp_value, Str()): <NEW_LINE> <INDENT> if self.allowed_values is None: <NEW_LINE> <INDENT> self.allowed_values = [other.exp_value] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.allowed_values.append(other.exp_value) <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> return super().__or__(other) <NEW_LINE> <DEDENT> def _instancecheck_impl(self, value, info: Info) -> InfoMsg: <NEW_LINE> <INDENT> res = List(Str()).__instancecheck__(value, info) <NEW_LINE> if not res: <NEW_LINE> <INDENT> return info.errormsg(self, "Not a list of strings", value) <NEW_LINE> <DEDENT> if self.allowed_values is None or all(val in self.allowed_values for val in value): <NEW_LINE> <INDENT> return info.wrap(True) <NEW_LINE> <DEDENT> return info.errormsg(self, "Does contain invalid elements", value) <NEW_LINE> <DEDENT> def convert(self, value, param, ctx: click.Context) -> t.List[str]: <NEW_LINE> <INDENT> if isinstance(value, self): <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> elif isinstance(value, str): <NEW_LINE> <INDENT> value = str(value) <NEW_LINE> return value.split(",") <NEW_LINE> <DEDENT> self.fail("{} is no valid comma separated string list".format(value), param, ctx) <NEW_LINE> <DEDENT> def __str__(self) -> str: <NEW_LINE> <INDENT> if self.allowed_values is None: <NEW_LINE> <INDENT> return "StrList()" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return "StrList(allowed={})".format(repr(self.allowed_values)) <NEW_LINE> <DEDENT> <DEDENT> def get_default_yaml(self, indents: int = 0, indentation: int = 4, str_list: bool = False, defaults=None, comment_out_defaults: bool = False) -> str: <NEW_LINE> <INDENT> if defaults is None: <NEW_LINE> <INDENT> defaults = self.get_default() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> typecheck(defaults, self) <NEW_LINE> <DEDENT> i_str = " " * indents * indentation <NEW_LINE> ret_str = i_str + "[{}]".format(", ".join(defaults)) <NEW_LINE> return [ret_str] if str_list else ret_str <NEW_LINE> <DEDENT> def _eq_impl(self, other: 'StrList') -> bool: <NEW_LINE> <INDENT> return self.allowed_values == other.allowed_values
A comma-separated string list which contains elements from a fixed set of allowed values.
6259905da79ad1619776b5e0
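A minimal standalone sketch of the comma-separated parsing and allowed-value check that StrList.convert and its instance check perform; the helper name parse_str_list is an assumption for illustration, not part of the library.

def parse_str_list(value, allowed=None):
    # Split a comma-separated string into a list and optionally restrict the values.
    items = value.split(",") if isinstance(value, str) else list(value)
    if allowed is not None and any(item not in allowed for item in items):
        raise ValueError("list contains invalid elements: %r" % (items,))
    return items

print(parse_str_list("fast,exec", allowed=["fast", "exec", "stat"]))  # ['fast', 'exec']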
class ShortcutManagerPlugin: <NEW_LINE> <INDENT> def __init__(self, iface): <NEW_LINE> <INDENT> self.iface = iface <NEW_LINE> self.plugin_dir = os.path.dirname(__file__).decode(sys.getfilesystemencoding()) <NEW_LINE> locale = QSettings().value('locale/userLocale')[0:2] <NEW_LINE> locale_path = os.path.join( self.plugin_dir, 'i18n', 'ShortcutManager_{}.qm'.format(locale)) <NEW_LINE> if os.path.exists(locale_path): <NEW_LINE> <INDENT> self.translator = QTranslator() <NEW_LINE> self.translator.load(locale_path) <NEW_LINE> if qVersion() > '4.3.3': <NEW_LINE> <INDENT> QCoreApplication.installTranslator(self.translator) <NEW_LINE> <DEDENT> <DEDENT> self.actions = [] <NEW_LINE> self.menu = self.tr(u'&Shortcut Manager') <NEW_LINE> self.manager = ShortcutManager(self.iface) <NEW_LINE> <DEDENT> def tr(self, message): <NEW_LINE> <INDENT> return QCoreApplication.translate('ShortcutManager', message) <NEW_LINE> <DEDENT> def add_action( self, icon, text, callback, enabled_flag=True, add_to_menu=True, add_to_toolbar=True, status_tip=None, whats_this=None, parent=None): <NEW_LINE> <INDENT> action = QAction(icon, text, parent) <NEW_LINE> action.triggered.connect(callback) <NEW_LINE> action.setEnabled(enabled_flag) <NEW_LINE> if status_tip is not None: <NEW_LINE> <INDENT> action.setStatusTip(status_tip) <NEW_LINE> <DEDENT> if whats_this is not None: <NEW_LINE> <INDENT> action.setWhatsThis(whats_this) <NEW_LINE> <DEDENT> if add_to_toolbar: <NEW_LINE> <INDENT> self.iface.addToolBarIcon(action) <NEW_LINE> <DEDENT> if add_to_menu: <NEW_LINE> <INDENT> self.iface.addPluginToMenu( self.menu, action) <NEW_LINE> <DEDENT> self.actions.append(action) <NEW_LINE> return action <NEW_LINE> <DEDENT> def initGui(self): <NEW_LINE> <INDENT> shortcutManageIcon = QIcon(":/ShortcutManager/icons/icon.png" ) <NEW_LINE> shortcutManageText = "Shortcut manager" <NEW_LINE> self.add_action( shortcutManageIcon, shortcutManageText, callback=self.run, parent=self.iface.mainWindow(), add_to_toolbar = False) <NEW_LINE> <DEDENT> def unload(self): <NEW_LINE> <INDENT> for action in self.actions: <NEW_LINE> <INDENT> self.iface.removePluginMenu( self.tr(u'&Shortcut Manager'), action) <NEW_LINE> self.iface.removeToolBarIcon(action) <NEW_LINE> <DEDENT> self.manager.unload() <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> self.manager.dialog.show() <NEW_LINE> result = self.manager.dialog.exec_() <NEW_LINE> if result: <NEW_LINE> <INDENT> pass
QGIS Plugin Implementation.
6259905dd6c5a102081e3768
class FileField: <NEW_LINE> <INDENT> def __init__( self, name: str, value: typing.Union[typing.IO[typing.AnyStr], tuple] ) -> None: <NEW_LINE> <INDENT> self.name = name <NEW_LINE> if not isinstance(value, tuple): <NEW_LINE> <INDENT> self.filename = Path(str(getattr(value, "name", "upload"))).name <NEW_LINE> self.file = ( value ) <NEW_LINE> self.content_type = self.guess_content_type() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.filename = value[0] <NEW_LINE> self.file = value[1] <NEW_LINE> self.content_type = ( value[2] if len(value) > 2 else self.guess_content_type() ) <NEW_LINE> <DEDENT> <DEDENT> def guess_content_type(self) -> typing.Optional[str]: <NEW_LINE> <INDENT> if self.filename: <NEW_LINE> <INDENT> return ( mimetypes.guess_type(self.filename)[0] or "application/octet-stream" ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def render_headers(self) -> bytes: <NEW_LINE> <INDENT> parts = [ b"Content-Disposition: form-data; ", format_form_param("name", self.name), ] <NEW_LINE> if self.filename: <NEW_LINE> <INDENT> filename = format_form_param("filename", self.filename) <NEW_LINE> parts.extend([b"; ", filename]) <NEW_LINE> <DEDENT> if self.content_type is not None: <NEW_LINE> <INDENT> content_type = self.content_type.encode() <NEW_LINE> parts.extend([b"\r\nContent-Type: ", content_type]) <NEW_LINE> <DEDENT> parts.append(b"\r\n\r\n") <NEW_LINE> return b"".join(parts) <NEW_LINE> <DEDENT> def render_data(self) -> bytes: <NEW_LINE> <INDENT> if isinstance(self.file, str): <NEW_LINE> <INDENT> content = self.file <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> content = self.file.read() <NEW_LINE> <DEDENT> return content.encode("utf-8") if isinstance(content, str) else content
A single file field item, within a multipart form field.
6259905d379a373c97d9a66a
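A hedged sketch of the multipart headers FileField builds for a named upload; it uses only the standard library and reproduces the header shape, not the class's exact byte-for-byte output.

import mimetypes

filename = "report.csv"
content_type = mimetypes.guess_type(filename)[0] or "application/octet-stream"
headers = (
    'Content-Disposition: form-data; name="upload"; filename="%s"\r\n'
    'Content-Type: %s\r\n\r\n' % (filename, content_type)
).encode()
print(headers)  # b'Content-Disposition: form-data; name="upload"; filename="report.csv"\r\n...'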
class Lstm(nn.Module): <NEW_LINE> <INDENT> def __init__(self, weights): <NEW_LINE> <INDENT> super(Lstm, self).__init__() <NEW_LINE> self.n_code: int = 16 <NEW_LINE> self.lstm_size: int = 128 <NEW_LINE> self.batch_size: int = 64 <NEW_LINE> self.n_epochs: int = 10 <NEW_LINE> if isinstance(weights, BertModel): <NEW_LINE> <INDENT> self.embeddings = weights <NEW_LINE> self.embedding_dim: int = 768 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.embeddings = nn.Embedding.from_pretrained(weights) <NEW_LINE> self.embedding_dim: int = 300 <NEW_LINE> <DEDENT> self.lstm = nn.LSTM(self.embedding_dim, self.lstm_size) <NEW_LINE> self.hidden2code = nn.Linear(self.lstm_size, self.n_code) <NEW_LINE> <DEDENT> def forward(self, X: List[List[int]]) -> torch.tensor: <NEW_LINE> <INDENT> seq_lens = torch.Tensor([len(seq) for seq in X]) <NEW_LINE> X = [torch.LongTensor(samp) for samp in X] <NEW_LINE> pad_X = pad_sequence(X) <NEW_LINE> embeds = self.embeddings(pad_X) <NEW_LINE> if isinstance(self.embeddings, BertModel): <NEW_LINE> <INDENT> embeds = embeds[0] <NEW_LINE> <DEDENT> pack_X = pack_padded_sequence(embeds, seq_lens, enforce_sorted=False) <NEW_LINE> _, (h_n, _) = self.lstm(pack_X) <NEW_LINE> code_space = self.hidden2code(h_n) <NEW_LINE> code_scores = torch.sigmoid(code_space).squeeze() <NEW_LINE> return code_scores <NEW_LINE> <DEDENT> def fit(self, X: List[List[int]], Y: List[List[int]]): <NEW_LINE> <INDENT> dataset = ICDDataset(X, Y) <NEW_LINE> batcher = Batcher(dataset, batch_size=self.batch_size) <NEW_LINE> self.train() <NEW_LINE> loss_fn = nn.BCELoss() <NEW_LINE> optimizer = torch.optim.SGD(self.parameters(), lr=0.01, momentum=0.9) <NEW_LINE> print("Training LSTM .....") <NEW_LINE> for i in range(self.n_epochs): <NEW_LINE> <INDENT> print(f"\tEpoch {i}:", end=" ") <NEW_LINE> for X_batch, Y_batch in batcher: <NEW_LINE> <INDENT> self.zero_grad() <NEW_LINE> outputs = self.forward(X_batch) <NEW_LINE> loss = loss_fn(outputs, Y_batch) <NEW_LINE> loss.backward() <NEW_LINE> optimizer.step() <NEW_LINE> <DEDENT> print(f"loss = {loss}") <NEW_LINE> <DEDENT> print("done.") <NEW_LINE> return self <NEW_LINE> <DEDENT> def predict(self, X: List[List[int]], threshold: float = 0.5) -> np.ndarray: <NEW_LINE> <INDENT> probs = self(X) <NEW_LINE> pos = torch.where(probs < threshold, probs, torch.ones(*probs.shape)) <NEW_LINE> neg = torch.where(pos > threshold, pos, torch.zeros(*probs.shape)) <NEW_LINE> preds = neg.long().numpy() <NEW_LINE> return preds <NEW_LINE> <DEDENT> def predict_proba(self, X: List[List[int]]) -> np.ndarray: <NEW_LINE> <INDENT> return self(X).detach().numpy()
An LSTM implementation with sklearn-like methods.
6259905dac7a0e7691f73b28
class PlanItem(EntityItem): <NEW_LINE> <INDENT> def __init__(self, patients, workspace_id, patient_id, entity): <NEW_LINE> <INDENT> super(PlanItem, self).__init__(patients, workspace_id, patient_id, entity) <NEW_LINE> <DEDENT> def _wait_delivery_information(self): <NEW_LINE> <INDENT> start = datetime.datetime.now() <NEW_LINE> DELAY = 0.2 <NEW_LINE> while (datetime.datetime.now() - start).total_seconds() < self._proknow.ENTITY_WAIT_TIMEOUT: <NEW_LINE> <INDENT> entity_status = self.data["status"] <NEW_LINE> if entity_status == 'completed': <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> time.sleep(DELAY) <NEW_LINE> _, plan = self._requestor.get('/workspaces/' + self._workspace_id + '/plans/' + self._id) <NEW_LINE> self._update(plan) <NEW_LINE> <DEDENT> <DEDENT> raise TimeoutExceededError('Timeout exceeded while waiting for delivery information to reach completed status') <NEW_LINE> <DEDENT> def download(self, path): <NEW_LINE> <INDENT> assert isinstance(path, six.string_types), "`path` is required as a string." <NEW_LINE> if os.path.isdir(path): <NEW_LINE> <INDENT> resolved_path = os.path.join(os.path.abspath(path), "RP." + self._data["uid"] + ".dcm") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> absolute = os.path.abspath(path) <NEW_LINE> directory = os.path.dirname(path) <NEW_LINE> if os.path.isdir(directory): <NEW_LINE> <INDENT> resolved_path = absolute <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise InvalidPathError('`' + path + '` is invalid') <NEW_LINE> <DEDENT> <DEDENT> self._requestor.stream('/workspaces/' + self._workspace_id + '/plans/' + self._id + '/dicom', resolved_path) <NEW_LINE> return resolved_path <NEW_LINE> <DEDENT> def get_delivery_information(self): <NEW_LINE> <INDENT> self._wait_delivery_information() <NEW_LINE> headers = { 'ProKnow-Key': self.data["key"] } <NEW_LINE> _, content = self._requestor.get('/plans/' + self._id + '/delivery/' + self._data["data"]["delivery_tag"], headers=headers) <NEW_LINE> return content <NEW_LINE> <DEDENT> def refresh(self): <NEW_LINE> <INDENT> _, plan = self._requestor.get('/workspaces/' + self._workspace_id + '/plans/' + self._id) <NEW_LINE> self._update(plan)
This class represents a plan. It's instantiated by the :class:`proknow.Patients.EntitySummary` class as a complete representation of a plan entity. Attributes: id (str): The id of the entity (readonly). data (dict): The complete representation of the entity as returned from the API (readonly).
6259905d3d592f4c4edbc523
class FileLinkShareForm(forms.Form): <NEW_LINE> <INDENT> email = forms.CharField(max_length=512, error_messages={ 'required': _("Email is required"), 'max_length': _("Email is not longer than 512 characters"), }) <NEW_LINE> file_shared_link = forms.CharField()
Form for sharing a file's shared link to email addresses.
6259905d56b00c62f0fb3f12
class Node: <NEW_LINE> <INDENT> def __init__(self, state, parent=None, action=None, path_cost=0): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def exapand(self, problem): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def child_node(self, problem, action): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def solution(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def path(self): <NEW_LINE> <INDENT> raise NotImplementedError
A node in a search tree. Contains a pointer to the parent (the node that this is a successor of) and to the actual state for this node. Note that if a state is arrived at by two paths, then there are two nodes with the same state. Also includes the action that got us to this state, and the total path_cost (also known as g) to reach the node. Other functions may add an f and h value; see best_first_graph_search and astar_search for an explanation of how the f and h values are handled. You will not need to subclass this class.
6259905d004d5f362081fb12
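A minimal filled-in sketch of the parent-chain idea the docstring describes (not the intended assignment solution); the _Node name and the toy states are assumptions.

class _Node:
    def __init__(self, state, parent=None, action=None, path_cost=0):
        self.state, self.parent, self.action, self.path_cost = state, parent, action, path_cost

    def path(self):
        # Walk parent pointers back to the root, then reverse.
        node, back = self, []
        while node:
            back.append(node)
            node = node.parent
        return list(reversed(back))

    def solution(self):
        return [node.action for node in self.path()[1:]]

goal = _Node("C", _Node("B", _Node("A"), "A->B", 1), "B->C", 2)
print(goal.solution())  # ['A->B', 'B->C']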
class OlderUpgrade(Exception): <NEW_LINE> <INDENT> def __init__(self, version, latest): <NEW_LINE> <INDENT> self.msg = "Upgrade version is older than latest version" <NEW_LINE> self.msg += "\n<upgrade version: '{}'>".format(version) <NEW_LINE> self.msg += "\n<latest version: '{}'>".format(latest) <NEW_LINE> super(self.__class__, self).__init__(self.msg)
Raised if the submitted upgrade version is older than the latest existing version.
6259905d7d847024c075da19
class AvatarServiceTests(SpyAgency, TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(AvatarServiceTests, self).setUp() <NEW_LINE> self.user = User(username='username', email='[email protected]', first_name='User', last_name='Name') <NEW_LINE> <DEDENT> def test_default_urls(self): <NEW_LINE> <INDENT> service = AvatarService() <NEW_LINE> request = HttpRequest() <NEW_LINE> with self.assertRaises(NotImplementedError): <NEW_LINE> <INDENT> service.get_avatar_urls(request, self.user, 32) <NEW_LINE> <DEDENT> <DEDENT> def test_render(self): <NEW_LINE> <INDENT> service = DummyAvatarService() <NEW_LINE> self.assertEqual( service.render(HttpRequest(), self.user, 24), '<img src="http://example.com/avatar.png" alt="User Name"' ' width="24" height="24"' ' srcset="http://example.com/avatar.png 1x">\n') <NEW_LINE> <DEDENT> def test_render_2x(self): <NEW_LINE> <INDENT> service = DummyAvatarService(use_2x=True) <NEW_LINE> self.assertEqual( service.render(HttpRequest(), self.user, 24), '<img src="http://example.com/avatar.png" alt="User Name"' ' width="24" height="24"' ' srcset="http://example.com/avatar.png 1x,' ' http://example.com/[email protected] 2x">\n') <NEW_LINE> <DEDENT> def test_get_avatar_urls_caching(self): <NEW_LINE> <INDENT> service = DummyAvatarService() <NEW_LINE> request = HttpRequest() <NEW_LINE> self.spy_on(service.get_avatar_urls_uncached) <NEW_LINE> self.assertIs(service.get_avatar_urls(request, self.user, 48), service.get_avatar_urls(request, self.user, 48)) <NEW_LINE> self.assertEqual(len(service.get_avatar_urls_uncached.calls), 1) <NEW_LINE> self.assertIs(service.get_avatar_urls(request, self.user, 32), service.get_avatar_urls(request, self.user, 32)) <NEW_LINE> self.assertEqual(len(service.get_avatar_urls_uncached.calls), 2)
Tests for djblets.avatars.services.base.
6259905d4f88993c371f1042
class FileHandler(Handler): <NEW_LINE> <INDENT> def __init__(self, filename, mode="a"): <NEW_LINE> <INDENT> Handler.__init__(self) <NEW_LINE> self.stream = open(filename, mode) <NEW_LINE> self.baseFilename = filename <NEW_LINE> self.mode = mode <NEW_LINE> <DEDENT> def reopen(self): <NEW_LINE> <INDENT> self.close() <NEW_LINE> self.stream = open(self.baseFilename, self.mode) <NEW_LINE> self.closed = False <NEW_LINE> <DEDENT> def remove(self): <NEW_LINE> <INDENT> self.close() <NEW_LINE> try: <NEW_LINE> <INDENT> os.remove(self.baseFilename) <NEW_LINE> <DEDENT> except OSError as why: <NEW_LINE> <INDENT> if why.args[0] != errno.ENOENT: <NEW_LINE> <INDENT> raise
File handler which supports reopening of logs.
6259905d0a50d4780f7068e2
class ProfileCompletionMiddleware: <NEW_LINE> <INDENT> def __init__(self, get_response): <NEW_LINE> <INDENT> self.get_response = get_response <NEW_LINE> <DEDENT> def __call__(self, request): <NEW_LINE> <INDENT> if not request.user.is_anonymous: <NEW_LINE> <INDENT> if not request.user.is_staff: <NEW_LINE> <INDENT> profile = request.user.profile <NEW_LINE> if not profile.picture or not profile.biography: <NEW_LINE> <INDENT> if request.path != reverse("users:update_profile"): <NEW_LINE> <INDENT> return redirect("users:update_profile") <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> response = self.get_response(request) <NEW_LINE> return response
Profile completion middleware. Ensures that every user interacting with the platform has a profile picture and biography.
6259905d01c39578d7f1425a
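A hedged settings.py sketch showing where such a middleware would be registered; the dotted path users.middleware.ProfileCompletionMiddleware is an assumption about the project layout.

MIDDLEWARE = [
    "django.contrib.sessions.middleware.SessionMiddleware",
    "django.contrib.auth.middleware.AuthenticationMiddleware",
    "users.middleware.ProfileCompletionMiddleware",  # after auth, so request.user is populated
]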
class TestCrawler(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.site = MockServer() <NEW_LINE> self.site.start() <NEW_LINE> self.settings = get_settings() <NEW_LINE> self.settings['EXTENSIONS']['scrapy.contrib.corestats.CoreStats'] = 0 <NEW_LINE> self.engine_status = [] <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> self.site.stop() <NEW_LINE> <DEDENT> def cb(self, response): <NEW_LINE> <INDENT> self.engine_status.append(get_engine_status(self.crawler.engine)) <NEW_LINE> <DEDENT> def _assert_no_requests(self): <NEW_LINE> <INDENT> self.assertEqual(len(self.engine_status), 0, self.engine_status) <NEW_LINE> stats = self.crawler.stats.get_stats() <NEW_LINE> self.assertNotIn('scheduler/enqueued', stats) <NEW_LINE> self.assertNotIn('scheduler/dequeued', stats) <NEW_LINE> self.assertNotIn('downloader/request_count', stats) <NEW_LINE> self.assertNotIn('downloader/response_count', stats) <NEW_LINE> <DEDENT> def _assert_engine_worked(self): <NEW_LINE> <INDENT> stats = self.crawler.stats.get_stats() <NEW_LINE> self.assertIn('start_time', stats) <NEW_LINE> self.assertIn('finish_time', stats) <NEW_LINE> self.assertEquals(stats['finish_reason'], 'finished') <NEW_LINE> <DEDENT> @defer.inlineCallbacks <NEW_LINE> def test_crawl_start_requests_disabled(self): <NEW_LINE> <INDENT> self.crawler = GalaxyCrawler( SingleRequestSpider, self.settings, start_requests=False) <NEW_LINE> yield self.crawler.crawl(seed=self.site.url(), callback_func=self.cb) <NEW_LINE> self._assert_engine_worked() <NEW_LINE> self._assert_no_requests() <NEW_LINE> <DEDENT> @defer.inlineCallbacks <NEW_LINE> def test_crawl_start_requests_enabled(self): <NEW_LINE> <INDENT> self.crawler = GalaxyCrawler( SingleRequestSpider, self.settings, start_requests=True) <NEW_LINE> yield self.crawler.crawl(seed=self.site.url(), callback_func=self.cb) <NEW_LINE> self._assert_engine_worked() <NEW_LINE> self.assertEqual(len(self.engine_status), 1, self.engine_status) <NEW_LINE> est = dict(self.engine_status[0]) <NEW_LINE> self.assertEqual(est['engine.spider.name'], self.crawler.spider.name) <NEW_LINE> self.assertEqual(est['len(engine.scraper.slot.active)'], 1) <NEW_LINE> stats = self.crawler.stats.get_stats() <NEW_LINE> self.assertEqual(stats['scheduler/enqueued'], 1) <NEW_LINE> self.assertEqual(stats['scheduler/dequeued'], 1) <NEW_LINE> self.assertEqual(stats['downloader/request_count'], 1) <NEW_LINE> self.assertEqual(stats['downloader/response_count'], 1) <NEW_LINE> <DEDENT> @defer.inlineCallbacks <NEW_LINE> def test_crawl_start_requests_default(self): <NEW_LINE> <INDENT> self.crawler = GalaxyCrawler(SingleRequestSpider, self.settings) <NEW_LINE> yield self.crawler.crawl(seed=self.site.url(), callback_func=self.cb) <NEW_LINE> self._assert_engine_worked() <NEW_LINE> self._assert_no_requests()
The spider shouldn't make start requests if a list of start requests wasn't passed to the 'crawl' method.
6259905dd486a94d0ba2d60f
class User(AbstractBaseUser, PermissionsMixin): <NEW_LINE> <INDENT> email = models.CharField(max_length=255, unique=True) <NEW_LINE> name = models.CharField(max_length=255) <NEW_LINE> is_active = models.BooleanField(default=True) <NEW_LINE> is_staff = models.BooleanField(default=False) <NEW_LINE> objects = UserManager() <NEW_LINE> USERNAME_FIELD = 'email'
Custom user model that supports using an email address instead of a username.
6259905dd268445f2663a680
class IscsiSessionEndpoints(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.swagger_types = { 'local_endpoint': 'ScsiProtocolEndpoint', 'remote_endpoint': 'ScsiProtocolEndpoint' } <NEW_LINE> self.attribute_map = { 'local_endpoint': 'localEndpoint', 'remote_endpoint': 'remoteEndpoint' } <NEW_LINE> self._local_endpoint = None <NEW_LINE> self._remote_endpoint = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def local_endpoint(self): <NEW_LINE> <INDENT> return self._local_endpoint <NEW_LINE> <DEDENT> @local_endpoint.setter <NEW_LINE> def local_endpoint(self, local_endpoint): <NEW_LINE> <INDENT> self._local_endpoint = local_endpoint <NEW_LINE> <DEDENT> @property <NEW_LINE> def remote_endpoint(self): <NEW_LINE> <INDENT> return self._remote_endpoint <NEW_LINE> <DEDENT> @remote_endpoint.setter <NEW_LINE> def remote_endpoint(self, remote_endpoint): <NEW_LINE> <INDENT> self._remote_endpoint = remote_endpoint <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> if self is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if self is None or other is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
6259905d097d151d1a2c26b5
class Curve(IdentifiedObject): <NEW_LINE> <INDENT> y2_unit = UnitSymbol <NEW_LINE> x_unit = UnitSymbol <NEW_LINE> curve_style = CurveStyle <NEW_LINE> y1_unit = UnitSymbol <NEW_LINE> pass
Relationship between an independent variable (X-axis) and one or two dependent variables (Y1-axis and Y2-axis). Curves can also serve as schedules.
6259905df548e778e596cbd1
class TFMT5Model(TFT5Model): <NEW_LINE> <INDENT> model_type = "mt5" <NEW_LINE> config_class = MT5Config
This class overrides :class:`~transformers.TFT5Model`. Please check the superclass for the appropriate documentation alongside usage examples. Examples:: >>> from transformers import TFMT5Model, T5Tokenizer >>> model = TFMT5Model.from_pretrained("google/mt5-small") >>> tokenizer = T5Tokenizer.from_pretrained("google/mt5-small") >>> article = "UN Offizier sagt, dass weiter verhandelt werden muss in Syrien." >>> summary = "Weiter Verhandlung in Syrien." >>> batch = tokenizer.prepare_seq2seq_batch(src_texts=[article], tgt_texts=[summary], return_tensors="tf") >>> batch["decoder_input_ids"] = batch["labels"] >>> del batch["labels"] >>> outputs = model(batch) >>> hidden_states = outputs.last_hidden_state
6259905d7d847024c075da1a
class VBox(Gtk.VBox): <NEW_LINE> <INDENT> def __init__(self, *args): <NEW_LINE> <INDENT> Gtk.VBox.__init__(self) <NEW_LINE> self.set_spacing(6) <NEW_LINE> self.set_border_width(0) <NEW_LINE> for widget in args: <NEW_LINE> <INDENT> self.pack_start(widget)
A vertical container
6259905d097d151d1a2c26b6
class FlatBottomReceptorLigandRestraint(ReceptorLigandRestraint): <NEW_LINE> <INDENT> energy_function = 'lambda_restraints * step(r-r0) * (K/2)*(r-r0)^2' <NEW_LINE> bond_parameter_names = ['K', 'r0'] <NEW_LINE> def _determineBondParameters(self): <NEW_LINE> <INDENT> x_unit = self.coordinates.unit <NEW_LINE> x = self.coordinates[self.receptor_atoms,:] / x_unit <NEW_LINE> natoms = x.shape[0] <NEW_LINE> xref = numpy.reshape(x[self.restrained_receptor_atom,:], (1,3)) <NEW_LINE> distances = numpy.sqrt(((x - numpy.tile(xref, (natoms, 1)))**2).sum(1)) <NEW_LINE> median_absolute_distance = numpy.median(abs(distances)) <NEW_LINE> median_absolute_distance *= x_unit <NEW_LINE> sigma = 1.4826 * median_absolute_distance <NEW_LINE> r0 = 2*sigma + 5.0 * units.angstroms <NEW_LINE> logger.debug("restraint distance r0 = %.1f A" % (r0 / units.angstroms)) <NEW_LINE> K = 0.6 * units.kilocalories_per_mole / units.angstroms**2 <NEW_LINE> logger.debug("K = %.1f kcal/mol/A^2" % (K / (units.kilocalories_per_mole / units.angstroms**2))) <NEW_LINE> bond_parameters = [K, r0] <NEW_LINE> return bond_parameters
An alternative choice to receptor-ligand restraints that uses a flat potential inside most of the protein volume with harmonic restraining walls outside of this. EXAMPLE >>> # Create a test system. >>> from repex import testsystems >>> system_container = testsystems.LysozymeImplicit() >>> (system, positions) = system_container.system, system_container.positions >>> # Identify receptor and ligand atoms. >>> receptor_atoms = range(0,2603) >>> ligand_atoms = range(2603,2621) >>> # Construct a reference thermodynamic state. >>> from oldrepex import ThermodynamicState >>> temperature = 298.0 * units.kelvin >>> state = ThermodynamicState(temperature=temperature) >>> # Create restraints. >>> restraints = FlatBottomReceptorLigandRestraint(state, system, positions, receptor_atoms, ligand_atoms) >>> # Get standard state correction. >>> correction = restraints.getStandardStateCorrection() >>> # Get radius of gyration of receptor. >>> rg = restraints.getReceptorRadiusOfGyration()
6259905d8da39b475be0482e
class Solution: <NEW_LINE> <INDENT> def copyRandomList(self, head: 'Node') -> 'Node': <NEW_LINE> <INDENT> def copyList(head: Node) ->Node: <NEW_LINE> <INDENT> if head is None: <NEW_LINE> <INDENT> return head <NEW_LINE> <DEDENT> if head in visited: <NEW_LINE> <INDENT> return visited[head] <NEW_LINE> <DEDENT> node = Node(head.val, None, None) <NEW_LINE> visited[head] = node <NEW_LINE> node.next = copyList(head.next) <NEW_LINE> node.random = copyList(head.random) <NEW_LINE> return node <NEW_LINE> <DEDENT> visited = {} <NEW_LINE> return copyList(head)
Use DFS and record the nodes already visited; otherwise the recursion stack would overflow (because cycles may exist).
6259905d1f037a2d8b9e538f
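A hedged usage sketch, assuming the Solution class above is importable and using a minimal LeetCode-style Node with val/next/random.

class Node:
    def __init__(self, val, next=None, random=None):
        self.val, self.next, self.random = val, next, random

a, b = Node(1), Node(2)
a.next, a.random, b.random = b, b, b   # b.random points back at b, forming a cycle

copy = Solution().copyRandomList(a)
print(copy.val, copy.random.val, copy.random is b)  # 1 2 False (a true deep copy)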
class NoWarnFilter(logging.Filter): <NEW_LINE> <INDENT> def filter(self, record): <NEW_LINE> <INDENT> return not (record.levelno == logging.WARN or record.levelno == logging.ERROR)
Filter out any records that are warnings or errors. (This is useful if your warnings and errors are sent to their own handler, e.g. stderr.)
6259905dd6c5a102081e376a
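A short sketch of the split-handler setup the docstring has in mind, assuming the NoWarnFilter class above is in scope: warnings and errors go to stderr, everything else stays on stdout.

import logging
import sys

out = logging.StreamHandler(sys.stdout)
out.addFilter(NoWarnFilter())        # stdout: everything except WARNING/ERROR
err = logging.StreamHandler(sys.stderr)
err.setLevel(logging.WARNING)        # stderr: WARNING and above

log = logging.getLogger("demo")
log.setLevel(logging.DEBUG)
log.addHandler(out)
log.addHandler(err)
log.info("stdout only")
log.warning("stderr only")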
class BookmarkTypeInputHandler(sublime_plugin.ListInputHandler): <NEW_LINE> <INDENT> names = { "file": "Current file", "topic": "Current topic", "view": "Current view" } <NEW_LINE> descs = { "file": "Bookmark the current help file", "topic": "Bookmark the topic currently under the cursor", "view": "Bookmark the current help view" } <NEW_LINE> def __init__(self, help_view): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.view = help_view <NEW_LINE> <DEDENT> def name(self): <NEW_LINE> <INDENT> return "bmark_type" <NEW_LINE> <DEDENT> def placeholder(self): <NEW_LINE> <INDENT> return "bookmark type" <NEW_LINE> <DEDENT> def preview(self, value): <NEW_LINE> <INDENT> return sublime.Html("<strong>{}</strong>: <em>{}</em>".format( self.names.get(value, "unknown"), self.descs.get(value, "unknown") )) <NEW_LINE> <DEDENT> def list_items(self): <NEW_LINE> <INDENT> items = [ ("this file", "file"), ("this view", "view") ] <NEW_LINE> if len(self.view.sel()) > 0: <NEW_LINE> <INDENT> pt = self.view.sel()[0].b <NEW_LINE> if self.view.match_selector(pt, "meta.link"): <NEW_LINE> <INDENT> items.insert(1, ("this topic", "topic")) <NEW_LINE> <DEDENT> <DEDENT> return items <NEW_LINE> <DEDENT> def validate(self, value): <NEW_LINE> <INDENT> self.bmark_type = value <NEW_LINE> return True <NEW_LINE> <DEDENT> def next_input(self, args): <NEW_LINE> <INDENT> if args.get("bmark_name") is None: <NEW_LINE> <INDENT> return BookmarkNameInputHandler(find_help_view(), self.bmark_type)
Allow the user to select what type of bookmark they want to create by displaying a list of the available bookmark types that are valid in the current context.
6259905d07f4c71912bb0a84
class TokenAuthentication(BaseAuthentication): <NEW_LINE> <INDENT> model = Token <NEW_LINE> def authenticate(self, request): <NEW_LINE> <INDENT> auth = get_authorization_header(request).split() <NEW_LINE> if not auth or auth[0].lower() != b'token': <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if len(auth) == 1: <NEW_LINE> <INDENT> msg = _('Invalid token header. No credentials provided.') <NEW_LINE> raise exceptions.AuthenticationFailed(msg) <NEW_LINE> <DEDENT> elif len(auth) > 2: <NEW_LINE> <INDENT> msg = _('Invalid token header. Token string should not contain spaces.') <NEW_LINE> raise exceptions.AuthenticationFailed(msg) <NEW_LINE> <DEDENT> return self.authenticate_credentials(auth[1]) <NEW_LINE> <DEDENT> def authenticate_credentials(self, key): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> token = self.model.objects.select_related('user').get(key=key) <NEW_LINE> <DEDENT> except self.model.DoesNotExist: <NEW_LINE> <INDENT> raise exceptions.AuthenticationFailed(_('Invalid token.')) <NEW_LINE> <DEDENT> if not token.user.is_active: <NEW_LINE> <INDENT> raise exceptions.AuthenticationFailed(_('User inactive or deleted.')) <NEW_LINE> <DEDENT> return (token.user, token) <NEW_LINE> <DEDENT> def authenticate_header(self, request): <NEW_LINE> <INDENT> return 'Token'
Simple token based authentication. Clients should authenticate by passing the token key in the "Authorization" HTTP header, prepended with the string "Token ". For example: Authorization: Token 401f7ac837da42b97f613d789819ff93537bee6a
6259905d3539df3088ecd8e4
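A hedged client-side sketch of the header described above, using the requests package; the URL and token value are placeholders.

import requests

resp = requests.get(
    "https://example.com/api/items/",
    headers={"Authorization": "Token 401f7ac837da42b97f613d789819ff93537bee6a"},
)
print(resp.status_code)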
class DataIntegration(object): <NEW_LINE> <INDENT> def __init__(self, data): <NEW_LINE> <INDENT> self.data = data
Data Integration
6259905d1f5feb6acb164232
class PRO_001c: <NEW_LINE> <INDENT> play = Summon(CONTROLLER, RandomEntourage())
Power of the Horde
6259905d2ae34c7f260ac72f
class GameHandler(WSHandlerMixin, WebSocketHandler): <NEW_LINE> <INDENT> def _take_action(self, json, user_id): <NEW_LINE> <INDENT> action_name = json.pop("action", "") <NEW_LINE> action_class = phase.get_action(self.state.phase.current, action_name) <NEW_LINE> if not action_class: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> player = self.state.create_player(user_id) <NEW_LINE> action = action_class(player) <NEW_LINE> if action.can_next: <NEW_LINE> <INDENT> action.act(**json) <NEW_LINE> action.next() <NEW_LINE> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> @gen.coroutine <NEW_LINE> def on_message(self, message): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> json = self.to_json(message) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> yield <NEW_LINE> raise gen.Return() <NEW_LINE> <DEDENT> sid = json.pop("session_id") <NEW_LINE> session = Session(self.adaptor, session_id=sid) <NEW_LINE> if session.user_id: <NEW_LINE> <INDENT> self._take_action(json, session.user_id) <NEW_LINE> yield self.write_on_same_room() <NEW_LINE> yield self.handle_ai() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> yield self.write_on_same_room() <NEW_LINE> <DEDENT> <DEDENT> @gen.coroutine <NEW_LINE> def handle_ai(self): <NEW_LINE> <INDENT> for player in self.state.player_AIs: <NEW_LINE> <INDENT> json = player._AI.get_action() <NEW_LINE> if json: <NEW_LINE> <INDENT> logger.info("AI action: %s => %s" % (player, json)) <NEW_LINE> yield gen.sleep(1) <NEW_LINE> if self._take_action(json, player.user_id): <NEW_LINE> <INDENT> yield self.write_on_same_room() <NEW_LINE> yield self.handle_ai() <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> @gen.coroutine <NEW_LINE> def write_on_same_room(self): <NEW_LINE> <INDENT> for con, sid in WSHandlerMixin.connections[self.path]: <NEW_LINE> <INDENT> session = Session(self.adaptor, session_id=sid) <NEW_LINE> message = tornado.escape.json_encode(session.state.to_json()) <NEW_LINE> try: <NEW_LINE> <INDENT> con.write_message(message) <NEW_LINE> <DEDENT> except tornado.websocket.WebSocketClosedError: <NEW_LINE> <INDENT> pass
Returns a game state which any player can get. Every time a player changes a state, including a private state, all the players have to update the public state. There may also be an audience, and they have to update it as well.
6259905d7b25080760ed8804
class Float(MapUnary): <NEW_LINE> <INDENT> def __init__(self, publisher: Publisher) -> None: <NEW_LINE> <INDENT> MapUnary.__init__(self, publisher, float)
Implementing the functionality of float() for publishers.
6259905d63b5f9789fe867bb
class DFSTree: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.root: Node = None <NEW_LINE> <DEDENT> def _insertHelper(self, root, node): <NEW_LINE> <INDENT> if root is None: <NEW_LINE> <INDENT> root = node <NEW_LINE> return node <NEW_LINE> <DEDENT> if node.data < root.data: <NEW_LINE> <INDENT> root.left = self._insertHelper(root.left, node) <NEW_LINE> <DEDENT> elif node.data > root.data: <NEW_LINE> <INDENT> root.right = self._insertHelper(root.right, node) <NEW_LINE> <DEDENT> return root <NEW_LINE> <DEDENT> def _postOrderHelper(self, node): <NEW_LINE> <INDENT> if node is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self._postOrderHelper(node.left) <NEW_LINE> self._postOrderHelper(node.right) <NEW_LINE> print(node.data, end=' ') <NEW_LINE> <DEDENT> def _inOrderHelper(self, node): <NEW_LINE> <INDENT> if node is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self._inOrderHelper(node.left) <NEW_LINE> print(node.data, end=' ') <NEW_LINE> self._inOrderHelper(node.right) <NEW_LINE> <DEDENT> def _preOrderHelper(self, node): <NEW_LINE> <INDENT> if node is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> print(node.data, end=' ') <NEW_LINE> self._preOrderHelper(node.left) <NEW_LINE> self._preOrderHelper(node.right) <NEW_LINE> <DEDENT> def postOrder(self): <NEW_LINE> <INDENT> self._postOrderHelper(self.root) <NEW_LINE> print() <NEW_LINE> <DEDENT> def inOrder(self): <NEW_LINE> <INDENT> self._inOrderHelper(self.root) <NEW_LINE> print() <NEW_LINE> <DEDENT> def preOrder(self): <NEW_LINE> <INDENT> self._preOrderHelper(self.root) <NEW_LINE> print() <NEW_LINE> <DEDENT> def insert(self, data): <NEW_LINE> <INDENT> node = Node(data) <NEW_LINE> self.root = self._insertHelper(self.root, node)
Depth-First Binary Search Tree implementation class. Attributes: ----------- root : Node Root node of the tree.
6259905d6e29344779b01c97
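A short usage sketch, assuming DFSTree and its (unshown) Node helper class are importable.

tree = DFSTree()
for value in (8, 3, 10, 1, 6):
    tree.insert(value)

tree.inOrder()    # 1 3 6 8 10
tree.preOrder()   # 8 3 1 6 10
tree.postOrder()  # 1 6 3 10 8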
class Stack(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.items = [] <NEW_LINE> <DEDENT> def push (self, item): <NEW_LINE> <INDENT> self.items.append(item); <NEW_LINE> <DEDENT> def pop (self): <NEW_LINE> <INDENT> return self.items.pop() <NEW_LINE> <DEDENT> def size (self): <NEW_LINE> <INDENT> return len(self.items) <NEW_LINE> <DEDENT> def isEmpty (self): <NEW_LINE> <INDENT> return self.items == [] <NEW_LINE> <DEDENT> def isNotEmpty (self): <NEW_LINE> <INDENT> return self.items != [] <NEW_LINE> <DEDENT> def last (self): <NEW_LINE> <INDENT> return self.items[-1]
(number or operand) -> list; implements a stack.
6259905d3c8af77a43b68a65
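A short usage sketch of the stack as it might be driven by an RPN-style expression evaluator.

s = Stack()
for token in ("3", "4", "+"):
    s.push(token)

print(s.size(), s.last())       # 3 +
print(s.pop(), s.isNotEmpty())  # + True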
class AuthPageLocators(object): <NEW_LINE> <INDENT> USERNAME_LOCATOR = (By.ID, 'username') <NEW_LINE> PASSWORD_LOCATOR = (By.NAME, 'password') <NEW_LINE> LOGIN_BUTTON_LOCATOR = (By.NAME, 'submit') <NEW_LINE> LOGIN_FORM_LOCATOR = (By.ID, 'loginform')
Locators for the authentication page.
6259905d3617ad0b5ee07795
class DateRangeScraper(Scraper): <NEW_LINE> <INDENT> def __init__(self, min_date, max_date, **kwargs): <NEW_LINE> <INDENT> super().__init__(**kwargs) <NEW_LINE> assert(isinstance(min_date, datetime.date)) <NEW_LINE> assert(isinstance(max_date, datetime.date)) <NEW_LINE> assert(not isinstance(min_date, datetime.datetime)) <NEW_LINE> assert(not isinstance(max_date, datetime.datetime)) <NEW_LINE> self.min_date = min_date <NEW_LINE> self.max_date = max_date <NEW_LINE> self.dates = tuple(self._get_dates(self.min_date, self.max_date)) <NEW_LINE> <DEDENT> def _get_dates(self, min_date, max_date): <NEW_LINE> <INDENT> for n in range((max_date - min_date).days + 1): <NEW_LINE> <INDENT> yield min_date + datetime.timedelta(days=n) <NEW_LINE> <DEDENT> <DEDENT> def postprocess(self, articles): <NEW_LINE> <INDENT> articles = list(super(DateRangeScraper, self).postprocess(articles)) <NEW_LINE> for article in articles: <NEW_LINE> <INDENT> date = to_date(article.properties["date"]) <NEW_LINE> is_proper_date = self.min_date <= date <= self.max_date <NEW_LINE> if article.parent is None and article.properties.get("parent") is None: <NEW_LINE> <INDENT> error_msg = "{date} not within [{self.min_date}, {self.max_date}]" <NEW_LINE> raise ValueError(error_msg.format(**locals())) <NEW_LINE> <DEDENT> <DEDENT> return articles
Omits any articles that haven't been published in the given period. Provides min_date and max_date options which child classes can use to select data from their resource.
6259905da17c0f6771d5d6c7
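Since the class depends on an unshown Scraper base, here is a standalone sketch of the inclusive date-range generation that _get_dates performs.

import datetime

def date_range(min_date, max_date):
    # Inclusive range, mirroring DateRangeScraper._get_dates.
    for n in range((max_date - min_date).days + 1):
        yield min_date + datetime.timedelta(days=n)

print(list(date_range(datetime.date(2024, 1, 30), datetime.date(2024, 2, 2))))
# four dates: 2024-01-30 .. 2024-02-02, both endpoints included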
class IntegerValidator(Validator): <NEW_LINE> <INDENT> def validate(self, value): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> result = int(value) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return self.error(value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return result
Check if value can be safely converted to int.
6259905d45492302aabfdb22
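A hedged sketch with a minimal stand-in for the unshown Validator base class; raising ValueError from error() is an assumption for illustration.

class Validator:
    def error(self, value):
        raise ValueError("not an integer: %r" % (value,))

class IntegerValidator(Validator):
    def validate(self, value):
        try:
            return int(value)
        except (TypeError, ValueError):
            return self.error(value)

print(IntegerValidator().validate("42"))  # 42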
class IMyLayer(Interface): <NEW_LINE> <INDENT> pass
Marker interface for a layer, for testing purposes.
6259905d8e7ae83300eea6d7
class DefaultRouter(routers.DefaultRouter): <NEW_LINE> <INDENT> def extend(self, router): <NEW_LINE> <INDENT> self.registry.extend(router.registry)
Extends `DefaultRouter` class to add a method for extending url routes from another router.
6259905dd268445f2663a681
class myFieldStorage(cgi.FieldStorage): <NEW_LINE> <INDENT> def make_file(self, binary=None): <NEW_LINE> <INDENT> return tempfile.NamedTemporaryFile()
Our version uses a named temporary file instead of the default non-named file; keeping it visible (named) allows us to create a second link after the upload is done, thus avoiding the overhead of making a copy to the destination filename.
6259905d4a966d76dd5f053c
class Quantizer(pipeline.Pipeline): <NEW_LINE> <INDENT> def __init__(self, steps_per_quarter=4): <NEW_LINE> <INDENT> super(Quantizer, self).__init__( input_type=music_pb2.NoteSequence, output_type=music_pb2.NoteSequence) <NEW_LINE> self._steps_per_quarter = steps_per_quarter <NEW_LINE> <DEDENT> def transform(self, note_sequence): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> quantized_sequence = sequences_lib.quantize_note_sequence( note_sequence, self._steps_per_quarter) <NEW_LINE> return [quantized_sequence] <NEW_LINE> <DEDENT> except sequences_lib.MultipleTimeSignatureException as e: <NEW_LINE> <INDENT> tf.logging.debug('Multiple time signatures found in NoteSequence %s: %s', note_sequence.filename, e) <NEW_LINE> self._set_stats([statistics.Counter( 'sequences_discarded_because_multiple_time_signatures', 1)]) <NEW_LINE> return [] <NEW_LINE> <DEDENT> except sequences_lib.MultipleTempoException as e: <NEW_LINE> <INDENT> tf.logging.debug('Multiple tempos found in NoteSequence %s: %s', note_sequence.filename, e) <NEW_LINE> self._set_stats([statistics.Counter( 'sequences_discarded_because_multiple_tempos', 1)]) <NEW_LINE> return []
A Module that quantizes NoteSequence data.
6259905dd7e4931a7ef3d6a9
class BookTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.app = create_app() <NEW_LINE> self.client = self.app.test_client <NEW_LINE> self.database_path = 'postgres://john@localhost:5432/bookshelf' <NEW_LINE> setup_db(self.app, self.database_path) <NEW_LINE> self.new_book = { 'title': 'Anansi Boys', 'author': 'Neil Gaiman', 'rating': 5 } <NEW_LINE> with self.app.app_context(): <NEW_LINE> <INDENT> self.db = SQLAlchemy() <NEW_LINE> self.db.init_app(self.app) <NEW_LINE> self.db.create_all() <NEW_LINE> <DEDENT> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_get_a_certain_book_with_results(self): <NEW_LINE> <INDENT> res = self.client().post('/books', json={'search': 'no'}) <NEW_LINE> data = json.loads(res.data) <NEW_LINE> self.assertEqual(res.status_code, 200) <NEW_LINE> self.assertEqual(data['success'], True) <NEW_LINE> self.assertTrue(data['total_books']) <NEW_LINE> self.assertTrue(len(data['books'])) <NEW_LINE> <DEDENT> def test_get_a_certain_book_without_results(self): <NEW_LINE> <INDENT> res = self.client().post('/books', json={'search': 'kakaadj'}) <NEW_LINE> data = json.loads(res.data) <NEW_LINE> self.assertEqual(res.status_code, 200) <NEW_LINE> self.assertEqual(data['success'], True) <NEW_LINE> self.assertTrue(data['total_books'], 0) <NEW_LINE> self.assertEqual(len(data['books']), 0)
This class represents the bookshelf test case
6259905d097d151d1a2c26b8
class InvalidTemplateError(Exception): <NEW_LINE> <INDENT> pass
Raised on Graphite template configuration validation errors
6259905d8da39b475be04830
class Parameters(HasObservers): <NEW_LINE> <INDENT> pass
This object is used to get and set the values of named parameters for a vehicle. See the following links for information about the supported parameters for each platform: `Copter <http://copter.ardupilot.com/wiki/configuration/arducopter-parameters/>`_, `Plane <http://plane.ardupilot.com/wiki/arduplane-parameters/>`_, `Rover <http://rover.ardupilot.com/wiki/apmrover2-parameters/>`_. Attribute names are generated automatically based on parameter names. The example below shows how to get and set the value of a parameter. Note that 'set' operations are not guaranteed to be complete until :py:func:`flush() <Vehicle.flush>` is called on the parent :py:class:`Vehicle` object. .. code:: python # Print the value of the THR_MIN parameter. print "Param: %s" % vehicle.parameters['THR_MIN'] # Change the parameter value to something different. vehicle.parameters['THR_MIN']=100 vehicle.flush() .. note:: At time of writing ``Parameters`` does not implement the observer methods, and change notification for parameters is not supported. .. todo:: Check to see if observers have been implemented and if so, update the information here, in about, and in Vehicle class: https://github.com/dronekit/dronekit-python/issues/107
6259905dac7a0e7691f73b2c
class TestRelated(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testRelated(self): <NEW_LINE> <INDENT> pass
Related unit test stubs
6259905d3eb6a72ae038bca9
class Root(Solver): <NEW_LINE> <INDENT> def solve(self, vars, consts=(), *, func: Callable = None, all_out: bool = False, **kwargs): <NEW_LINE> <INDENT> if func is None: <NEW_LINE> <INDENT> func = self.func <NEW_LINE> <DEDENT> vars = np.asarray(vars, dtype=float) <NEW_LINE> if not isinstance(consts, (tuple)): <NEW_LINE> <INDENT> raise ValueError( f'consts must be a tuple, not {type(consts).__name__}') <NEW_LINE> <DEDENT> if 'tol' not in kwargs: <NEW_LINE> <INDENT> kwargs['tol'] = self.atol <NEW_LINE> <DEDENT> if 'options' not in kwargs: <NEW_LINE> <INDENT> kwargs['options'] = {} <NEW_LINE> <DEDENT> if 'maxiter' not in kwargs['options']: <NEW_LINE> <INDENT> kwargs['options']['maxiter'] = self.max_runs <NEW_LINE> <DEDENT> out = optimize.root(func, vars, args=consts, **kwargs) <NEW_LINE> return out if all_out else out.x
SciPy root solver; returns only the final value unless all_out is True.
6259905d7b25080760ed8805
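Since the Root wrapper inherits from an unshown Solver base, here is a hedged sketch of the underlying scipy.optimize.root call it delegates to (requires SciPy).

import numpy as np
from scipy import optimize

def func(x, a):
    return np.array([x[0] ** 2 - a])   # single equation: root at sqrt(a)

out = optimize.root(func, np.array([1.0]), args=(2.0,), tol=1e-10)
print(out.x)  # ~ [1.41421356]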