code (string, 4-4.48k chars) | docstring (string, 1-6.45k chars) | _id (string, 24 chars)
---|---|---|
class TestSuiteJSONSchema(Base): <NEW_LINE> <INDENT> __tablename__ = 'TestSuiteJSONSchemas' <NEW_LINE> testsuite_name = Column("TestSuiteName", String(256), primary_key=True) <NEW_LINE> jsonschema = Column("JSONSchema", Binary) <NEW_LINE> def __init__(self, testsuite_name, data): <NEW_LINE> <INDENT> self.testsuite_name = testsuite_name <NEW_LINE> self.jsonschema = json.dumps(data, sort_keys=True).encode('utf-8') | Saves the json schema used when creating a testsuite. Only used for suites
created with a json schema description. | 625990520c0af96317c577d4 |
class MutableURL(object): <NEW_LINE> <INDENT> def __init__(self, url): <NEW_LINE> <INDENT> self.parts = urlparse(url) <NEW_LINE> self.query = dict(parse_qsl(self.parts[4])) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> scheme, netloc, path, params, query, fragment = self.parts <NEW_LINE> query = urlencode(utf8dict(items(self.query))) <NEW_LINE> components = [scheme + '://', netloc, path or '/', ';{0}'.format(params) if params else '', '?{0}'.format(query) if query else '', '#{0}'.format(fragment) if fragment else ''] <NEW_LINE> return ''.join(c for c in components if c) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<{0}: {1}>'.format(type(self).__name__, self) | Object wrapping a Uniform Resource Locator.
Supports editing the query parameter list.
You can convert the object back to a string; the query will be
properly urlencoded.
Examples
>>> url = URL('http://www.google.com:6580/foo/bar?x=3&y=4#foo')
>>> url.query
{'x': '3', 'y': '4'}
>>> str(url)
'http://www.google.com:6580/foo/bar?y=4&x=3#foo'
>>> url.query['x'] = 10
>>> url.query.update({'George': 'Costanza'})
>>> str(url)
'http://www.google.com:6580/foo/bar?y=4&x=10&George=Costanza#foo' | 6259905276e4537e8c3f0a71 |
class Principal(object): <NEW_LINE> <INDENT> swagger_types = { 'email': 'str', 'name': 'str' } <NEW_LINE> attribute_map = { 'email': 'email', 'name': 'name' } <NEW_LINE> def __init__(self, email=None, name=None): <NEW_LINE> <INDENT> self._email = None <NEW_LINE> self._name = None <NEW_LINE> self.discriminator = None <NEW_LINE> if email is not None: <NEW_LINE> <INDENT> self.email = email <NEW_LINE> <DEDENT> if name is not None: <NEW_LINE> <INDENT> self.name = name <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def email(self): <NEW_LINE> <INDENT> return self._email <NEW_LINE> <DEDENT> @email.setter <NEW_LINE> def email(self, email): <NEW_LINE> <INDENT> self._email = email <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @name.setter <NEW_LINE> def name(self, name): <NEW_LINE> <INDENT> self._name = name <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, Principal): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 6259905263d6d428bbee3cb7 |
class QSingleton(Singleton, type(QObject)): <NEW_LINE> <INDENT> pass | A metaclass for making Qt objects singletons | 62599052009cb60464d02a24 |
class Combatant: <NEW_LINE> <INDENT> def __init__(self, combat_wait): <NEW_LINE> <INDENT> self.combat_wait = combat_wait <NEW_LINE> <DEDENT> def decide_combat_moves(self, gm, moveset): <NEW_LINE> <INDENT> self.decide_melee_moves(gm, moveset) <NEW_LINE> self.decide_close_moves(gm, moveset) <NEW_LINE> return moveset <NEW_LINE> <DEDENT> def decide_melee_moves(self, gm, moveset): <NEW_LINE> <INDENT> locs = np.transpose(np.nonzero(gm.melee_mat)) <NEW_LINE> strns = [gm.strn[x, y] for (x, y) in locs] <NEW_LINE> for ci in np.argsort(strns)[::-1]: <NEW_LINE> <INDENT> cx, cy = locs[ci] <NEW_LINE> if gm.strnc[cx, cy] < (gm.prodc[cx, cy] * self.combat_wait): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> nbrs = gm.nbrs[(cx, cy)] <NEW_LINE> scores = [gm.combat_heur[nx, ny] - (gm.wall[nx, ny] * 100000) for (nx, ny) in nbrs] <NEW_LINE> nx, ny = nbrs[np.argmax(scores)] <NEW_LINE> if gm.total_strn < gm.strn[nx, ny] or not gm.safe_to_take[nx, ny]: <NEW_LINE> <INDENT> nx, ny = cx, cy <NEW_LINE> <DEDENT> gm.combat_heur[nx, ny] /= 10000 <NEW_LINE> moveset.add_move(cx, cy, nx, ny) <NEW_LINE> <DEDENT> <DEDENT> def decide_close_moves(self, gm, moveset): <NEW_LINE> <INDENT> locs = np.transpose(np.nonzero(gm.close_to_combat)) <NEW_LINE> for cx, cy in locs: <NEW_LINE> <INDENT> if gm.strnc[cx, cy] < (gm.prodc[cx, cy] * self.combat_wait): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if gm.dist_from_combat[cx, cy] > 6: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> dmat = np.divide(gm.melee_mat, gm.dists[cx, cy]) <NEW_LINE> tx, ty = np.unravel_index(dmat.argmax(), dmat.shape) <NEW_LINE> if gm.total_strn < gm.strn[tx, ty] or not gm.safe_to_take[tx, ty]: <NEW_LINE> <INDENT> tx, ty = cx, cy <NEW_LINE> <DEDENT> if gm.dists[cx, cy, tx, ty] < 4 and gm.strnc[cx, cy] < (gm.prodc[cx, cy] * (self.combat_wait + 1.5)): <NEW_LINE> <INDENT> moveset.add_move(cx, cy, cx, cy) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> moveset.add_move(cx, cy, tx, ty) | Handle all the moves for combat zones. | 625990523539df3088ecd78c |
class RandomWalk(): <NEW_LINE> <INDENT> def __init__(self, num_points=5000): <NEW_LINE> <INDENT> self.num_points = num_points <NEW_LINE> self.x_values = [0] <NEW_LINE> self.y_values = [0] <NEW_LINE> <DEDENT> def get_step(self): <NEW_LINE> <INDENT> direction = choice([1, -1]) <NEW_LINE> distance = choice(list(range(16))) <NEW_LINE> step = direction * distance <NEW_LINE> return step <NEW_LINE> <DEDENT> def fill_walk(self): <NEW_LINE> <INDENT> while len(self.x_values) < self.num_points: <NEW_LINE> <INDENT> x_step = self.get_step() <NEW_LINE> y_step = self.get_step() <NEW_LINE> if x_step == 0 and y_step == 0: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> next_x = self.x_values[-1] + x_step <NEW_LINE> next_y = self.y_values[-1] + y_step <NEW_LINE> self.x_values.append(next_x) <NEW_LINE> self.y_values.append(next_y) | A class to generate random walks. | 62599052a8ecb033258726fd |
class ScopSearch(unittest.TestCase): <NEW_LINE> <INDENT> def test_search(self): <NEW_LINE> <INDENT> handle = SCOP.search("1JOY") | SCOP search tests. | 6259905215baa72349463478 |
class Meta: <NEW_LINE> <INDENT> model_class = ConnectionRequest | Connection request schema metadata. | 6259905210dbd63aa1c720ca |
class TwentyFortyEight: <NEW_LINE> <INDENT> def __init__(self, grid_height, grid_width): <NEW_LINE> <INDENT> self._height=grid_height <NEW_LINE> self._width=grid_width <NEW_LINE> self.grid=[] <NEW_LINE> self._INITI_TILES={} <NEW_LINE> self._INITI_TILES[UP]= [(0, index) for index in range(self._width)] <NEW_LINE> self._INITI_TILES[DOWN]= [(self._height-1, index) for index in range(self._width)] <NEW_LINE> self._INITI_TILES[LEFT]= [(index,0) for index in range(self._height)] <NEW_LINE> self._INITI_TILES[RIGHT]= [(index, self._width-1) for index in range(self._height)] <NEW_LINE> self.reset() <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> for dummy_i in range(0,self._height): <NEW_LINE> <INDENT> self.grid.append(self._width*[0]) <NEW_LINE> <DEDENT> self.new_tile() <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> girdstringlist=[] <NEW_LINE> for item in self.grid: <NEW_LINE> <INDENT> girdstringlist.append(item) <NEW_LINE> output_str=str(girdstringlist) <NEW_LINE> output_str.replace(",",''[:2]) <NEW_LINE> <DEDENT> return output_str <NEW_LINE> <DEDENT> def get_grid_height(self): <NEW_LINE> <INDENT> return self._height <NEW_LINE> <DEDENT> def get_grid_width(self): <NEW_LINE> <INDENT> return self._width <NEW_LINE> <DEDENT> def move(self, direction): <NEW_LINE> <INDENT> move_flag=0 <NEW_LINE> for item in self._INITI_TILES[direction]: <NEW_LINE> <INDENT> temporary_list=[] <NEW_LINE> indice_list=[] <NEW_LINE> indice_range=int(math.fabs(OFFSETS[direction][0])*self._height+math.fabs(OFFSETS[direction][1])*self._width) <NEW_LINE> for num in range(indice_range): <NEW_LINE> <INDENT> indice_list.append((item[0]+num*OFFSETS[direction][0],item[1]+num*OFFSETS[direction][1])) <NEW_LINE> temporary_list.append(self.grid[indice_list[num][0]][indice_list[num][1]]) <NEW_LINE> <DEDENT> merge_list=merge(temporary_list) <NEW_LINE> for num in range(indice_range): <NEW_LINE> <INDENT> if self.grid[indice_list[num][0]][indice_list[num][1]]!=merge_list[num]: <NEW_LINE> <INDENT> move_flag+=1 <NEW_LINE> <DEDENT> self.grid[indice_list[num][0]][indice_list[num][1]]=merge_list[num] <NEW_LINE> <DEDENT> <DEDENT> if move_flag>0: <NEW_LINE> <INDENT> self.new_tile() <NEW_LINE> <DEDENT> <DEDENT> def new_tile(self): <NEW_LINE> <INDENT> emptylist=[] <NEW_LINE> for dummy_row in range(0,self._height): <NEW_LINE> <INDENT> for dummy_col in range(0,self._width): <NEW_LINE> <INDENT> if self.get_tile(dummy_row,dummy_col)==0: <NEW_LINE> <INDENT> emptylist.append((dummy_row,dummy_col)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> random_num=random.randrange(0, 10) <NEW_LINE> if random_num==9: <NEW_LINE> <INDENT> random_indice=random.choice(emptylist) <NEW_LINE> self.set_tile(random_indice[0],random_indice[1],4) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> random_indice=random.choice(emptylist) <NEW_LINE> self.set_tile(random_indice[0],random_indice[1],2) <NEW_LINE> <DEDENT> <DEDENT> def set_tile(self, row, col, value): <NEW_LINE> <INDENT> self.grid[row][col]=value <NEW_LINE> <DEDENT> def get_tile(self, row, col): <NEW_LINE> <INDENT> return self.grid[row][col] | Class to run the game logic. | 62599052d6c5a102081e3604 |
class QLearning(object): <NEW_LINE> <INDENT> def __init__(self, action_space, epsilon, alpha, gamma): <NEW_LINE> <INDENT> self.action_space = action_space <NEW_LINE> self.epsilon = epsilon <NEW_LINE> self.alpha = alpha <NEW_LINE> self.gamma = gamma <NEW_LINE> self.Q = {} <NEW_LINE> <DEDENT> def update(self, obs, obs_old, reward, done, action): <NEW_LINE> <INDENT> obs_old_hashed = hash64(obs_old).hexdigest() <NEW_LINE> obs_hashed = hash64(obs).hexdigest() <NEW_LINE> if not obs_hashed in self.Q: <NEW_LINE> <INDENT> self.Q[obs_hashed] = np.zeros(len(self.action_space)) <NEW_LINE> <DEDENT> print('action = ', action) <NEW_LINE> if not done: <NEW_LINE> <INDENT> self.Q[obs_old_hashed][action] = self.Q[obs_old_hashed][action] + self.alpha * (reward + self.gamma * np.max(self.Q[obs_hashed]) - self.Q[obs_old_hashed][action]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.Q[obs_old_hashed][action] = self.Q[obs_old_hashed][action] + self.alpha * (reward + self.gamma * 0 - self.Q[obs_old_hashed][action]) <NEW_LINE> <DEDENT> <DEDENT> def act(self, observation, reward, done): <NEW_LINE> <INDENT> obs_hashed = hash64(observation).hexdigest() <NEW_LINE> if not obs_hashed in self.Q: <NEW_LINE> <INDENT> self.Q[obs_hashed] = np.zeros(len(self.action_space)) <NEW_LINE> <DEDENT> a_max = np.argmax(self.Q[obs_hashed]) <NEW_LINE> if random.random() > self.epsilon: <NEW_LINE> <INDENT> return (self.action_space[a_max], a_max) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> random_int = random.randint(0, len(self.action_space)-2) <NEW_LINE> return (self.action_space[random_int], random_int) if random_int < a_max else (self.action_space[random_int + 1], random_int + 1) | Q-Learning Algorithm | 6259905276d4e153a661dcee |
class DreamViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> permission_classes = (IsAuthenticated, ) <NEW_LINE> queryset = Dream.objects.all() <NEW_LINE> serializer_class = DreamSerializer | API endpoint that allows dream to be created, viewed, edited or deleted. | 625990522ae34c7f260ac5cd |
class FloatField(Field): <NEW_LINE> <INDENT> def process_jsondata(self, value): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.data = float(value) <NEW_LINE> <DEDENT> except (ValueError, TypeError): <NEW_LINE> <INDENT> self.data = None <NEW_LINE> raise ValueError(self.gettext('Not a valid float value')) | A text field, except all input is coerced to an float. | 62599052379a373c97d9a50c |
class State(IntEnum): <NEW_LINE> <INDENT> strongly_not_taken = 0 <NEW_LINE> weakly_not_taken = 1 <NEW_LINE> weakly_taken = 2 <NEW_LINE> strongly_taken = 3 | Potential states of the branch predictor. | 6259905229b78933be26ab38 |
class Job(JobMetaClass('JobBase', (object,), {'abstract': True})): <NEW_LINE> <INDENT> abstract = True <NEW_LINE> timeout = None <NEW_LINE> expires = None <NEW_LINE> doc_syntax = 'markdown' <NEW_LINE> can_overlap = True <NEW_LINE> def __call__(self, consumer, *args, **kwargs): <NEW_LINE> <INDENT> raise NotImplementedError("Jobs must implement the __call__ method.") <NEW_LINE> <DEDENT> @property <NEW_LINE> def type(self): <NEW_LINE> <INDENT> return 'regular' <NEW_LINE> <DEDENT> def queue_task(self, consumer, jobname, meta_params=None, **kwargs): <NEW_LINE> <INDENT> if meta_params is None: <NEW_LINE> <INDENT> meta_params = {} <NEW_LINE> <DEDENT> meta_params['from_task'] = consumer.task_id <NEW_LINE> return consumer.backend.queue_task(jobname, meta_params=meta_params, **kwargs) | The Job class which is used in a distributed task queue.
.. attribute:: name
The unique name which defines the Job and which can be used to retrieve
it from the job registry. This attribute is set to the Job class name
in lower case by default, unless a ``name`` class attribute is defined.
.. attribute:: abstract
If set to ``True`` (default is ``False``), the :class:`.Job` won't be
registered with the :class:`.JobRegistry`. Useful when creating a new
base class for several other jobs.
.. attribute:: type
Type of Job, one of ``regular`` and ``periodic``.
.. attribute:: timeout
An instance of a datetime.timedelta or ``None``. If set, it represents the
time lag after which a task which did not start expires.
Default: ``None``.
.. attribute:: can_overlap
Boolean indicating if this job can generate overlapping tasks. It can
also be a callable which accepts the same input parameters as the job
callable function.
Default: ``True``.
.. attribute:: doc_syntax
The doc string syntax.
Default: ``markdown``
.. attribute:: logger
an instance of a logger. Created at runtime. | 6259905216aa5153ce4019cd |
class Cie10(models.Model): <NEW_LINE> <INDENT> value = models.CharField("Valor", max_length=10) <NEW_LINE> description = models.CharField("Descripción", max_length=100) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.value <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = "Cie-10" <NEW_LINE> verbose_name_plural = "Cie-10" | This model represents the CIE-10 medical classification for illness
http://es.wikipedia.org/wiki/CIE-10 | 625990522ae34c7f260ac5ce |
class Dotkeys(dict): <NEW_LINE> <INDENT> __var_name = re.compile('^[a-zA-Z_]+[a-zA-Z_0-9]*$') <NEW_LINE> def __dir__(self): <NEW_LINE> <INDENT> return [i for i in self if type(i) == str and self.__var_name.match(i)] <NEW_LINE> <DEDENT> def __getattribute__(self, key, *argv): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return dict.__getattribute__(self, key) <NEW_LINE> <DEDENT> except AttributeError as e: <NEW_LINE> <INDENT> if key == '__deepcopy__': <NEW_LINE> <INDENT> raise e <NEW_LINE> <DEDENT> elif key[:4] == 'set_': <NEW_LINE> <INDENT> def set_value(value): <NEW_LINE> <INDENT> self[key[4:]] = value <NEW_LINE> return self <NEW_LINE> <DEDENT> return set_value <NEW_LINE> <DEDENT> elif key in self: <NEW_LINE> <INDENT> return self[key] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise e <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __setattr__(self, key, value): <NEW_LINE> <INDENT> if key in self: <NEW_LINE> <INDENT> self[key] = value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dict.__setattr__(self, key, value) <NEW_LINE> <DEDENT> <DEDENT> def __delattr__(self, key): <NEW_LINE> <INDENT> if key in self: <NEW_LINE> <INDENT> del self[key] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dict.__delattr__(self, key) | This is a sick-minded hack of dict, intended to be an eye-candy.
It allows you to get dict items by dot reference:
ipdb["lo"] == ipdb.lo
ipdb["eth0"] == ipdb.eth0
Obviously, it will not work for some cases, like unicode names
of interfaces and so on. Besides that, it introduces some
complexity.
But it simplifies life for old-school admins who work with good
old "lo", "eth0", and similar naming schemes. | 625990524428ac0f6e659a20 |
class IWSSetResourceAvailability(Interface): <NEW_LINE> <INDENT> pass | Marker interface | 6259905226068e7796d4de2f |
class AccountInvoiceDiscountTestCase(ModuleTestCase): <NEW_LINE> <INDENT> module = 'account_invoice_discount' | Test Account Invoice Discount module | 62599052a79ad1619776b531 |
class SeaLevels(ListAPIView): <NEW_LINE> <INDENT> renderer_classes = replace_json_renderer(ListAPIView.renderer_classes) <NEW_LINE> serializer_class = SeaLevelSerializer <NEW_LINE> def get_queryset(self, query_params=None, *args, **kwargs): <NEW_LINE> <INDENT> if query_params is None: <NEW_LINE> <INDENT> query_params = self.request.query_params <NEW_LINE> <DEDENT> interval_mins = parse_interval(query_params.get('interval', '1')) <NEW_LINE> return parse_and_get_queryset( self.kwargs.get('location_slug', None), query_params.get('start', None), query_params.get('end', None) )[:24 * 60:interval_mins] | Get tidal predictions at a given location. Valid parameters are
`start` and `end` (in format `2014-05-01T00:17:00Z`) and `interval` in
minutes. | 62599052e64d504609df9e44 |
class Products(db.Model): <NEW_LINE> <INDENT> __bind_key__ = 'tysql' <NEW_LINE> __tablename__ = 'products' <NEW_LINE> prod_id = db.Column(db.String(10), primary_key=True, nullable=False, comment='产品ID') <NEW_LINE> prod_name = db.Column(db.String(255), nullable=False, comment='产品名') <NEW_LINE> prod_price = db.Column(db.DECIMAL(8, 2), nullable=False, comment='产品价格') <NEW_LINE> prod_desc = db.Column(db.Text, nullable=True, comment='产品描述') <NEW_LINE> vend_id = db.Column(db.String(10), db.ForeignKey('vendors.vend_id'), comment='产品供应商ID') <NEW_LINE> orderitems = db.relationship('OrderItems', backref='product', lazy='dynamic') <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return '<Product %r>' % self.prod_name | Product information | 625990524a966d76dd5f03d7 |
class Results(Page): <NEW_LINE> <INDENT> def vars_for_template(self): <NEW_LINE> <INDENT> return { 'total_earnings': self.group.total_contribution * (self.group.mpcr * Constants.players_per_group), } | Players' payoff: how much each has earned | 6259905263b5f9789fe8665a |
class ApiUserDeleteHandler(ApiBaseHandler): <NEW_LINE> <INDENT> def __init__(self, application, request, **kwargs): <NEW_LINE> <INDENT> super().__init__(application, request, **kwargs) <NEW_LINE> self.username = self.get_argument("username", "") <NEW_LINE> <DEDENT> @auth_token <NEW_LINE> @gen.coroutine <NEW_LINE> def post(self, *args, **kwargs): <NEW_LINE> <INDENT> if type(self.username) == list: <NEW_LINE> <INDENT> flag = yield del_user_list(self) <NEW_LINE> self.change_to_jsonp(flag) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> flag = yield del_user_one(self) <NEW_LINE> self.change_to_jsonp(flag) | Class for deleting a user | 625990524e4d5625663738ef |
class Sensor: <NEW_LINE> <INDENT> def __init__(self, car: Car, space: pymunk.Space, angle: float, max_measure_dist: int=60): <NEW_LINE> <INDENT> self._car = car <NEW_LINE> self._angle = angle + self._car.shape.body.angle - radians(90) <NEW_LINE> self._max_measure_dist = max_measure_dist <NEW_LINE> self._line_one, self._line_two = self.draw_sensor(space) <NEW_LINE> <DEDENT> def draw_sensor(self, space: pymunk.Space, sensor_size: int=10) -> pymunk.Shape: <NEW_LINE> <INDENT> cross_body = pymunk.Body(body_type=pymunk.Body.DYNAMIC) <NEW_LINE> line_one = pymunk.Segment(cross_body, self.position + Coordinate(0, sensor_size), self.position + Coordinate(sensor_size, 0), 2) <NEW_LINE> line_two = pymunk.Segment(cross_body, self.position, self.position + Coordinate(sensor_size, sensor_size), 2) <NEW_LINE> line_one.color, line_two.color = pygame.color.THECOLORS['green'], pygame.color.THECOLORS['green'] <NEW_LINE> line_one.collision_type, line_two.collision_type = COLLISION_TYPES['sensor'], COLLISION_TYPES['sensor'] <NEW_LINE> line_one.sensor, line_two.sensor = True, True <NEW_LINE> space.add([line_one, line_two]) <NEW_LINE> return line_one, line_two <NEW_LINE> <DEDENT> def update_position(sensor) -> Coordinate: <NEW_LINE> <INDENT> anchor = sensor.car.sensor_anchor <NEW_LINE> return Coordinate(*rotate_around_center(anchor, anchor + Coordinate(*(sensor._max_measure_dist, 0)), sensor._angle)) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def interset_tester_collision_h(arbiter, space, data): <NEW_LINE> <INDENT> s1, s2 = arbiter.shapes <NEW_LINE> return False <NEW_LINE> <DEDENT> @property <NEW_LINE> def car(self): <NEW_LINE> <INDENT> return self._car <NEW_LINE> <DEDENT> @property <NEW_LINE> def position(self) -> Coordinate: <NEW_LINE> <INDENT> if not hasattr(self, '_position'): <NEW_LINE> <INDENT> self._position = self.update_position() <NEW_LINE> <DEDENT> return self._position <NEW_LINE> <DEDENT> @position.setter <NEW_LINE> def position(self, new_pos: Coordinate) -> None: <NEW_LINE> <INDENT> self._position = new_pos <NEW_LINE> self._line_one.body.position, self._line_two.body.position = new_pos, new_pos | Class for a Sensor. | 625990527d847024c075d8c3 |
class EditorCalltipsPage(ConfigurationPageBase, Ui_EditorCalltipsPage): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> ConfigurationPageBase.__init__(self) <NEW_LINE> self.setupUi(self) <NEW_LINE> self.setObjectName("EditorCalltipsPage") <NEW_LINE> self.ctEnabledCheckBox.setChecked( Preferences.getEditor("CallTipsEnabled")) <NEW_LINE> self.ctVisibleSlider.setValue( Preferences.getEditor("CallTipsVisible")) <NEW_LINE> self.callTipsBackgroundColour = self.initColour("CallTipsBackground", self.calltipsBackgroundButton, Preferences.getEditorColour) <NEW_LINE> self.ctScintillaCheckBox.setChecked( Preferences.getEditor("CallTipsScintillaOnFail")) <NEW_LINE> <DEDENT> def save(self): <NEW_LINE> <INDENT> Preferences.setEditor("CallTipsEnabled", int(self.ctEnabledCheckBox.isChecked())) <NEW_LINE> Preferences.setEditor("CallTipsVisible", self.ctVisibleSlider.value()) <NEW_LINE> Preferences.setEditorColour("CallTipsBackground", self.callTipsBackgroundColour) <NEW_LINE> Preferences.setEditor("CallTipsScintillaOnFail", int(self.ctScintillaCheckBox.isChecked())) <NEW_LINE> <DEDENT> @pyqtSignature("") <NEW_LINE> def on_calltipsBackgroundButton_clicked(self): <NEW_LINE> <INDENT> self.callTipsBackgroundColour = self.selectColour(self.calltipsBackgroundButton, self.callTipsBackgroundColour) | Class implementing the Editor Calltips configuration page. | 625990526e29344779b01b31 |
class Recording(Util): <NEW_LINE> <INDENT> def __init__(self, starttime, recordpath, frequency, length, gain=50, uniques=None): <NEW_LINE> <INDENT> self.starttime = datetime.datetime.strptime(starttime, "%m/%d/%Y %H:%M") <NEW_LINE> self.recordpath = recordpath <NEW_LINE> self.frequency = float(frequency) <NEW_LINE> self.length = int(length) <NEW_LINE> self.gain = int(gain) <NEW_LINE> self.uniques = uniques <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '{} {}'.format(self.recordpath, self.starttime.isoformat()) | Defines everything you need to know to schedule a record | 62599052435de62698e9d2eb |
class RawPal(LfdFile): <NEW_LINE> <INDENT> _filepattern = r'.*\.(pal|raw|bin|lut)' <NEW_LINE> def _init(self): <NEW_LINE> <INDENT> if self._filesize not in (768, 1024): <NEW_LINE> <INDENT> raise LfdFile.Error(self) <NEW_LINE> <DEDENT> self.dtype = numpy.dtype('u1') <NEW_LINE> <DEDENT> def _data(self, order=None): <NEW_LINE> <INDENT> data = numpy.fromfile(self._fh, 'u1').reshape(256, -1) <NEW_LINE> if order is None: <NEW_LINE> <INDENT> a = data.astype('i4') <NEW_LINE> b = a.reshape(-1, 256).T <NEW_LINE> if (numpy.sum(numpy.abs(numpy.diff(a, axis=0))) > numpy.sum(numpy.abs(numpy.diff(b, axis=0)))): <NEW_LINE> <INDENT> data = data.reshape(-1, 256).T <NEW_LINE> <DEDENT> <DEDENT> elif order == 'F': <NEW_LINE> <INDENT> data = data.reshape(-1, 256).T <NEW_LINE> <DEDENT> elif order != 'C': <NEW_LINE> <INDENT> raise ValueError("unknown order", order) <NEW_LINE> <DEDENT> if data.shape[1] == 4 and numpy.all(data[:, 3] == 0): <NEW_LINE> <INDENT> data[:, 3] = 255 <NEW_LINE> <DEDENT> return data | Raw color palette.
PAL files contain a single RGB or RGBA color palette, stored as 256x3 or
256x4 unsigned bytes in C or Fortran order, without any header.
Examples
--------
>>> with RawPal('rgb.pal') as f:
... print(f.asarray()[100])
[ 16 255 239]
>>> with RawPal('rgba.pal') as f:
... print(f.asarray()[100])
[219 253 187 255]
>>> with RawPal('rrggbb.pal') as f:
... print(f.asarray()[100])
[182 114 91]
>>> with RawPal('rrggbbaa.pal') as f:
... print(f.asarray()[100])
[182 114 91 170]
>>> with RawPal('rrggbbaa.pal') as f:
... print(f.asarray('F')[100])
[182 114 91 170] | 625990527cff6e4e811b6f2a |
class FlightSearchFormView(GetFormMixin, FormView): <NEW_LINE> <INDENT> template_name = "index.haml" <NEW_LINE> form_class = FlightSearchForm <NEW_LINE> def form_valid(self, form): <NEW_LINE> <INDENT> return redirect("%s?%s" % (reverse('search_results'), self.request.GET.urlencode())) | View that displays the flight search form; if the form is valid it redirects
to `FlightSearchResultsView`, otherwise it displays errors | 625990522ae34c7f260ac5cf |
class Formatter(object): <NEW_LINE> <INDENT> formatters = [] <NEW_LINE> separator = ' ' <NEW_LINE> show_headings = True <NEW_LINE> def __init__(self, name, desc): <NEW_LINE> <INDENT> Formatter.formatters.append(self) <NEW_LINE> self.name = name <NEW_LINE> self.desc = desc <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def PrintTypes(cls): <NEW_LINE> <INDENT> data = sorted([(x.name, x.desc) for x in cls.formatters]) <NEW_LINE> data.insert(0, ['Format', 'Description']) <NEW_LINE> AlignedFormatter.PrintRows(data) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def Get(cls, name): <NEW_LINE> <INDENT> for fmt in cls.formatters: <NEW_LINE> <INDENT> if fmt.name == name: <NEW_LINE> <INDENT> return fmt <NEW_LINE> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def GetWidths(cls, rows): <NEW_LINE> <INDENT> widths = [0 for _ in rows[0]] <NEW_LINE> max_column_width = 80 <NEW_LINE> if not cls.show_headings: <NEW_LINE> <INDENT> rows = rows[1:] <NEW_LINE> <DEDENT> for row in rows: <NEW_LINE> <INDENT> i = 0 <NEW_LINE> for col in row: <NEW_LINE> <INDENT> if col: <NEW_LINE> <INDENT> widths[i]= max(widths[i], min(max_column_width, len(str(col)))) <NEW_LINE> <DEDENT> i += 1 <NEW_LINE> <DEDENT> <DEDENT> return widths | A base class for an object that formats query results into a stream. | 6259905207f4c71912bb0923 |
class Pause: <NEW_LINE> <INDENT> def __init__(self, duration): <NEW_LINE> <INDENT> self._duration = 0x80 <NEW_LINE> self.duration = duration <NEW_LINE> <DEDENT> @property <NEW_LINE> def raw_value(self): <NEW_LINE> <INDENT> return self._duration <NEW_LINE> <DEDENT> @property <NEW_LINE> def duration(self): <NEW_LINE> <INDENT> return (self._duration & 0x7f) / 100.0 <NEW_LINE> <DEDENT> @duration.setter <NEW_LINE> def duration(self, duration): <NEW_LINE> <INDENT> if not 0.0 <= duration <= 1.27: <NEW_LINE> <INDENT> raise ValueError('Pause duration must be a value within 0.0-1.27!') <NEW_LINE> <DEDENT> self._duration = 0x80 | round(duration * 100.0) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "{}({})".format(type(self).__qualname__, self.duration) | DRV2605 waveform sequence timed delay. | 62599052e76e3b2f99fd9ee8 |
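A quick worked example of the encoding in the `Pause` class above (assuming Python 3, where `round()` returns an int):

```python
p = Pause(0.5)
assert p.raw_value == 0xB2   # 0x80 | round(0.5 * 100) == 0x80 | 50
assert p.duration == 0.5     # (0xB2 & 0x7F) / 100.0
```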
class _KnuthF: <NEW_LINE> <INDENT> def __init__(self, data): <NEW_LINE> <INDENT> self.data = np.array(data, copy=True) <NEW_LINE> if self.data.ndim != 1: <NEW_LINE> <INDENT> raise ValueError("data should be 1-dimensional") <NEW_LINE> <DEDENT> self.data.sort() <NEW_LINE> self.n = self.data.size <NEW_LINE> from scipy import special <NEW_LINE> self.gammaln = special.gammaln <NEW_LINE> <DEDENT> def bins(self, M): <NEW_LINE> <INDENT> return np.linspace(self.data[0], self.data[-1], int(M) + 1) <NEW_LINE> <DEDENT> def __call__(self, M): <NEW_LINE> <INDENT> return self.eval(M) <NEW_LINE> <DEDENT> def eval(self, M): <NEW_LINE> <INDENT> M = int(M) <NEW_LINE> if M <= 0: <NEW_LINE> <INDENT> return np.inf <NEW_LINE> <DEDENT> bins = self.bins(M) <NEW_LINE> nk, bins = np.histogram(self.data, bins) <NEW_LINE> return -(self.n * np.log(M) + self.gammaln(0.5 * M) - M * self.gammaln(0.5) - self.gammaln(self.n + 0.5 * M) + np.sum(self.gammaln(nk + 0.5))) | Class which implements the function minimized by knuth_bin_width
Parameters
----------
data : array-like, one dimension
data to be histogrammed
Notes
-----
the function F is given by
.. math::
F(M|x,I) = n\log(M) + \log\Gamma(\frac{M}{2})
- M\log\Gamma(\frac{1}{2})
- \log\Gamma(\frac{2n+M}{2})
+ \sum_{k=1}^M \log\Gamma(n_k + \frac{1}{2})
where :math:`\Gamma` is the Gamma function, :math:`n` is the number of
data points, :math:`n_k` is the number of measurements in bin :math:`k`.
See Also
--------
knuth_bin_width | 625990522ae34c7f260ac5d0 |
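A minimal usage sketch for the `_KnuthF` objective above (assuming numpy and scipy are available and the class is defined as shown); `eval()` returns the negated Knuth posterior, so the best bin count is the one that minimizes it:

```python
import numpy as np

data = np.random.normal(size=1000)
f = _KnuthF(data)

# Scan a small range of bin counts and keep the minimizer of the
# negative log-posterior.
best_M = min(range(2, 50), key=f.eval)
edges = f.bins(best_M)   # histogram bin edges spanning the data range
```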
class StringSerializePipeline(SerializePipeline): <NEW_LINE> <INDENT> pass | StringSerializePipeline
.. seealso::
:class:`kim.pipelines.serialization.SerializePipeline` | 62599052a219f33f346c7cee |
@dataclasses.dataclass(order=True, frozen=True) <NEW_LINE> class Lang: <NEW_LINE> <INDENT> lang: str <NEW_LINE> country: str = None <NEW_LINE> def __post_init__(self): <NEW_LINE> <INDENT> assert all(len(x) == 2 for x in filter(None, [self.lang, self.country])), self <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '_'.join(filter(None, [self.lang, self.country])) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def parse(cls, s): <NEW_LINE> <INDENT> return cls(*s.split('_')) | Language (and maybe country) code. | 6259905276e4537e8c3f0a75 |
class BaseRelation(object): <NEW_LINE> <INDENT> def __init__(self, item, status, relation_type=None): <NEW_LINE> <INDENT> if status not in ('attached', 'detached'): <NEW_LINE> <INDENT> raise ValueError <NEW_LINE> <DEDENT> self.item = item <NEW_LINE> self.status = status <NEW_LINE> self.relation_type = relation_type <NEW_LINE> <DEDENT> def is_attached(self): <NEW_LINE> <INDENT> return self.status == 'attached' <NEW_LINE> <DEDENT> def is_detached(self): <NEW_LINE> <INDENT> return not self.is_attached() <NEW_LINE> <DEDENT> def set_as_detached(self): <NEW_LINE> <INDENT> self.status = 'detached' <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return (self.item == other.item and self.status == other.status and self.relation_type == other.relation_type) | Class for all basic relations. | 625990523eb6a72ae038bb49 |
class CourseCommentView(LoginRequiredMixin, View): <NEW_LINE> <INDENT> def get(self, request, course_id): <NEW_LINE> <INDENT> course = Course.objects.get(id=int(course_id)) <NEW_LINE> user_courses = UserCourse.objects.filter(course=course) <NEW_LINE> user_ids = [user_course.user.id for user_course in user_courses] <NEW_LINE> all_user_courses = UserCourse.objects.filter(user_id__in=user_ids) <NEW_LINE> course_ids = [user_course.course.id for user_course in all_user_courses] <NEW_LINE> relate_courses = Course.objects.filter(id__in=course_ids).order_by("-click_nums")[:5] <NEW_LINE> all_resources = CourseResource.objects.filter(course=course) <NEW_LINE> all_comments = CourseComments.objects.filter(course__id=course_id) <NEW_LINE> return render(request, "course-comment.html", { "course": course, "course_resources": all_resources, "all_comments": all_comments, "relate_courses": relate_courses, }) | Course comments | 62599052a8ecb03325872701 |
class Chunk(object): <NEW_LINE> <INDENT> def __init__(self, nbt): <NEW_LINE> <INDENT> chunk_data = nbt['Level'] <NEW_LINE> self.coords = chunk_data['xPos'],chunk_data['zPos'] <NEW_LINE> self.blocks = BlockArray(chunk_data['Blocks'].value, chunk_data['Data'].value) <NEW_LINE> <DEDENT> def get_coords(self): <NEW_LINE> <INDENT> return (self.coords[0].value, self.coords[1].value) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "Chunk({0},{1})".format(str(self.coords[0]), str(self.coords[1])) | Class for representing a single chunk. | 62599052cb5e8a47e493cbfc |
class Subject(BaseModel): <NEW_LINE> <INDENT> name = models.CharField( verbose_name = u'Название', max_length = 254, ) <NEW_LINE> short_name = models.CharField( verbose_name = u'Короткое_название', max_length = 25, ) <NEW_LINE> description = models.TextField( verbose_name = u'Описание' ) <NEW_LINE> created_at = models.DateTimeField( verbose_name = u'Дата создания', auto_now_add = True ) <NEW_LINE> status = models.CharField( verbose_name = u'Статус', max_length = 50, choices=STATUS, default=STATUS[0][0] ) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = u'Предмет' <NEW_LINE> verbose_name_plural = u'Предметы' | Subject | 62599052e64d504609df9e45 |
class ConnectionThread(NetworkThread): <NEW_LINE> <INDENT> def __init__(self, url, timeout): <NEW_LINE> <INDENT> NetworkThread.__init__(self) <NEW_LINE> self.url = url <NEW_LINE> self.timeout = timeout <NEW_LINE> self.conn = None <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> info = self.connect() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> info = TaskHeadError(self.url.host, 0) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> self.data_queue.put(info) <NEW_LINE> self.ready.set() <NEW_LINE> <DEDENT> <DEDENT> @abstractmethod <NEW_LINE> def connect(self): pass <NEW_LINE> @abstractproperty <NEW_LINE> def protocol(self): pass | Abstract base class for connection threads. | 625990538e71fb1e983bcfb4 |
class ServosDriver(object): <NEW_LINE> <INDENT> def __init__(self, panpin, tiltpin, idletimeout, minsteps, maxsteps, panmaxangle, tiltmaxangle): <NEW_LINE> <INDENT> self._panpin = panpin <NEW_LINE> self._tiltpin = tiltpin <NEW_LINE> self._idletimeout = idletimeout <NEW_LINE> self._minsteps = minsteps <NEW_LINE> self._maxsteps = maxsteps <NEW_LINE> self.pan_servo = Servo(self._panpin, self._minsteps, self._maxsteps, panmaxangle) <NEW_LINE> self.tilt_servo = Servo(self._tiltpin, self._minsteps, self._maxsteps, tiltmaxangle) <NEW_LINE> self._initialized = False <NEW_LINE> <DEDENT> def init(self): <NEW_LINE> <INDENT> if not self._initialized: <NEW_LINE> <INDENT> path = os.path.split(os.path.realpath(__file__))[0] <NEW_LINE> command = ('/servod --idle-timeout=%s --min=%s --max=%s ' '--p1pins="%s,%s" > /dev/null') % (self._idletimeout, self._minsteps, self._maxsteps, self._panpin, self._tiltpin) <NEW_LINE> os.system(path + command) <NEW_LINE> self.pan_servo.init() <NEW_LINE> self.tilt_servo.init() <NEW_LINE> self._initialized = True <NEW_LINE> <DEDENT> <DEDENT> def cleanup(self): <NEW_LINE> <INDENT> if self._initialized: <NEW_LINE> <INDENT> self.pan_servo.cleanup() <NEW_LINE> self.tilt_servo.cleanup() <NEW_LINE> os.system('sudo killall servod') <NEW_LINE> self._initialized = False | Class which starts the servod blaster and configures it
It also initializes the servos which are connected (pan and tilt) | 6259905338b623060ffaa2c4 |
class SpatialSoftmax(nn.Module): <NEW_LINE> <INDENT> def __init__(self, height, width, channel): <NEW_LINE> <INDENT> super(SpatialSoftmax, self).__init__() <NEW_LINE> self.height = height <NEW_LINE> self.width = width <NEW_LINE> self.channel = channel <NEW_LINE> pos_x, pos_y = np.meshgrid( np.linspace(-1., 1., self.height), np.linspace(-1., 1., self.width)) <NEW_LINE> pos_x = torch.from_numpy(pos_x.reshape(self.height*self.width)).float() <NEW_LINE> pos_y = torch.from_numpy(pos_y.reshape(self.height*self.width)).float() <NEW_LINE> self.register_buffer('pos_x', pos_x) <NEW_LINE> self.register_buffer('pos_y', pos_y) <NEW_LINE> <DEDENT> def forward(self, feature): <NEW_LINE> <INDENT> feature = feature.view(-1, self.height*self.width) <NEW_LINE> softmax_attention = F.softmax(feature, dim=-1) <NEW_LINE> expected_x = torch.sum(self.pos_x*softmax_attention, dim=1, keepdim=True) <NEW_LINE> expected_y = torch.sum(self.pos_y*softmax_attention, dim=1, keepdim=True) <NEW_LINE> expected_xy = torch.cat([expected_x, expected_y], 1) <NEW_LINE> feature_keypoints = expected_xy.view(-1, self.channel*2) <NEW_LINE> return feature_keypoints | Spatial Softmax Implementation | 625990537d847024c075d8c5 |
class GenNoteClear(Operator): <NEW_LINE> <INDENT> bl_idname = "node.gen_note_clear" <NEW_LINE> bl_label = "Clear Text" <NEW_LINE> bl_options = {'REGISTER', 'UNDO'} <NEW_LINE> def execute(self, context): <NEW_LINE> <INDENT> node = context.node <NEW_LINE> node.clear() <NEW_LINE> return {'FINISHED'} | Clear Note Node | 62599053d99f1b3c44d06b8a |
class _WeightedSparseColumn(_FeatureColumn, collections.namedtuple( "_WeightedSparseColumn", ["sparse_id_column", "weight_column_name", "dtype"])): <NEW_LINE> <INDENT> def __new__(cls, sparse_id_column, weight_column_name, dtype): <NEW_LINE> <INDENT> return super(_WeightedSparseColumn, cls).__new__(cls, sparse_id_column, weight_column_name, dtype) <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return "{}_weighted_by_{}".format(self.sparse_id_column.name, self.weight_column_name) <NEW_LINE> <DEDENT> @property <NEW_LINE> def length(self): <NEW_LINE> <INDENT> return self.sparse_id_column.length <NEW_LINE> <DEDENT> @property <NEW_LINE> def config(self): <NEW_LINE> <INDENT> config = _get_feature_config(self.sparse_id_column) <NEW_LINE> config.update( {self.weight_column_name: parsing_ops.VarLenFeature(self.dtype)}) <NEW_LINE> return config <NEW_LINE> <DEDENT> @property <NEW_LINE> def key(self): <NEW_LINE> <INDENT> return "{}".format(self) <NEW_LINE> <DEDENT> def insert_transformed_feature(self, columns_to_tensors): <NEW_LINE> <INDENT> if self.sparse_id_column not in columns_to_tensors: <NEW_LINE> <INDENT> self.sparse_id_column.insert_transformed_feature(columns_to_tensors) <NEW_LINE> <DEDENT> weight_tensor = columns_to_tensors[self.weight_column_name] <NEW_LINE> if not isinstance(weight_tensor, sparse_tensor_py.SparseTensor): <NEW_LINE> <INDENT> weight_tensor = contrib_sparse_ops.dense_to_sparse_tensor(weight_tensor) <NEW_LINE> <DEDENT> if not self.dtype.is_floating: <NEW_LINE> <INDENT> weight_tensor = math_ops.to_float(weight_tensor) <NEW_LINE> <DEDENT> columns_to_tensors[self] = tuple([ columns_to_tensors[self.sparse_id_column], weight_tensor ]) <NEW_LINE> <DEDENT> def id_tensor(self, input_tensor): <NEW_LINE> <INDENT> return input_tensor[0] <NEW_LINE> <DEDENT> def weight_tensor(self, input_tensor): <NEW_LINE> <INDENT> return input_tensor[1] <NEW_LINE> <DEDENT> def _to_dnn_input_layer(self, input_tensor, weight_collections=None, trainable=True, output_rank=2): <NEW_LINE> <INDENT> raise ValueError( "WeightedSparseColumn is not supported in DNN. " "Please use embedding_column or one_hot_column. column: {}".format( self)) <NEW_LINE> <DEDENT> def _wide_embedding_lookup_arguments(self, input_tensor): <NEW_LINE> <INDENT> return _LinearEmbeddingLookupArguments( input_tensor=self.id_tensor(input_tensor), weight_tensor=self.weight_tensor(input_tensor), vocab_size=self.length, initializer=init_ops.zeros_initializer(), combiner=self.sparse_id_column.combiner) | See `weighted_sparse_column`. | 6259905321a7993f00c67458 |
class FileTmpdirPermissionError(FileError): <NEW_LINE> <INDENT> pass | File tmpdir permission error class. | 625990538da39b475be046d6 |
class LiveThreadContribution(object): <NEW_LINE> <INDENT> def __init__(self, thread): <NEW_LINE> <INDENT> self.thread = thread <NEW_LINE> <DEDENT> def add(self, body): <NEW_LINE> <INDENT> url = API_PATH['live_add_update'].format(id=self.thread.id) <NEW_LINE> self.thread._reddit.post(url, data={'body': body}) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> url = API_PATH['live_close'].format(id=self.thread.id) <NEW_LINE> self.thread._reddit.post(url) <NEW_LINE> <DEDENT> def update(self, title=None, description=None, nsfw=None, resources=None, **other_settings): <NEW_LINE> <INDENT> settings = {'title': title, 'description': description, 'nsfw': nsfw, 'resources': resources} <NEW_LINE> settings.update(other_settings) <NEW_LINE> if all(value is None for value in settings.values()): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> thread = LiveThread(self.thread._reddit, self.thread.id) <NEW_LINE> data = {key: getattr(thread, key) if value is None else value for key, value in settings.items()} <NEW_LINE> url = API_PATH['live_update_thread'].format(id=self.thread.id) <NEW_LINE> self.thread._reddit.post(url, data=data.copy()) <NEW_LINE> self.thread._reset_attributes(*data.keys()) | Provides a set of contribution functions to a LiveThread. | 62599053dc8b845886d54aaf |
class Agency: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.id = None <NEW_LINE> self.name = None <NEW_LINE> self.url = None <NEW_LINE> self.timezone = None <NEW_LINE> self.lang = None <NEW_LINE> self.phone = None <NEW_LINE> self.fare_url = None <NEW_LINE> self.email = None | contents of the **agency.txt** file (from https://developers.google.com/transit/gtfs/reference/)
Fields
______
* **id** `(agency_id)` **Optional** - The agency_id field is an ID that uniquely identifies a transit agency. A transit feed may represent data from more than one agency. The agency_id is dataset unique. This field is optional for transit feeds that only contain data for a single agency.
* **name** `(agency_name)` **Required** - The agency_name field contains the full name of the transit agency. Google Maps will display this name.
* **url** `(agency_url)` **Required** - The agency_url field contains the URL of the transit agency. The value must be a fully qualified URL that includes http:// or https://, and any special characters in the URL must be correctly escaped. See http://www.w3.org/Addressing/URL/4_URI_Recommentations.html for a description of how to create fully qualified URL values.
* **timezone** `(agency_timezone)` **Required** - The agency_timezone field contains the timezone where the transit agency is located. Timezone names never contain the space character but may contain an underscore. Please refer to http://en.wikipedia.org/wiki/List_of_tz_zones for a list of valid values. If multiple agencies are specified in the feed, each must have the same agency_timezone.
* **lang** `(agency_lang)` **Optional** - The agency_lang field contains a two-letter ISO 639-1 code for the primary language used by this transit agency. The language code is case-insensitive (both en and EN are accepted). This setting defines capitalization rules and other language-specific settings for all text contained in this transit agency's feed. Please refer to http://www.loc.gov/standards/iso639-2/php/code_list.php for a list of valid values.
* **phone** `(agency_phone)` **Optional** - The agency_phone field contains a single voice telephone number for the specified agency. This field is a string value that presents the telephone number as typical for the agency's service area. It can and should contain punctuation marks to group the digits of the number. Dialable text (for example, TriMet's "503-238-RIDE") is permitted, but the field must not contain any other descriptive text.
* **fare_url** `(agency_fare_url)` **Optional** - The agency_fare_url specifies the URL of a web page that allows a rider to purchase tickets or other fare instruments for that agency online. The value must be a fully qualified URL that includes http:// or https://, and any special characters in the URL must be correctly escaped. See http://www.w3.org/Addressing/URL/4_URI_Recommentations.html for a description of how to create fully qualified URL values.
* **email** `(agency_email)` **Optional** - Contains a single valid email address actively monitored by the agency’s customer service department. This email address will be considered a direct contact point where transit riders can reach a customer service representative at the agency. | 62599053f7d966606f74932d |
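For orientation, a hypothetical `Agency` instance filled with made-up values for the fields listed above:

```python
a = Agency()
a.id, a.name = 'DTA', 'Demo Transit Authority'   # illustrative values only
a.url = 'http://example.com'
a.timezone = 'America/Los_Angeles'               # required; shared by all agencies in a feed
a.lang, a.phone = 'en', '555-0100'
```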
class Player: <NEW_LINE> <INDENT> def __init__(self, inventory = {}): <NEW_LINE> <INDENT> self.inventory = inventory | A player | 62599053379a373c97d9a510 |
class MappingContext(object): <NEW_LINE> <INDENT> def __init__(self, rxnorm, treatment, drug_problem=None): <NEW_LINE> <INDENT> self._rxnorm=rxnorm <NEW_LINE> self._treatment=treatment <NEW_LINE> self._drug_problem = drug_problem <NEW_LINE> concept_names = {} <NEW_LINE> for c in rxnorm.concepts: <NEW_LINE> <INDENT> cn = rxnorm.concepts[c]._name.lower() <NEW_LINE> cn = cn.split('@')[0].strip() <NEW_LINE> if cn in concept_names: <NEW_LINE> <INDENT> concept_names[cn].add(c) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> concept_names[cn] = set([c]) <NEW_LINE> <DEDENT> <DEDENT> self._concept_names=concept_names <NEW_LINE> <DEDENT> @property <NEW_LINE> def rxnorm(self): <NEW_LINE> <INDENT> return self._rxnorm <NEW_LINE> <DEDENT> @property <NEW_LINE> def treatment(self): <NEW_LINE> <INDENT> return self._treatment <NEW_LINE> <DEDENT> @property <NEW_LINE> def concept_names(self): <NEW_LINE> <INDENT> return self._concept_names <NEW_LINE> <DEDENT> @property <NEW_LINE> def drug_problem(self): <NEW_LINE> <INDENT> return self._drug_problem <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> rxcount = len(self._rxnorm.concepts) <NEW_LINE> tscount = len(self._treatment) <NEW_LINE> dpcount = len(self._drug_problem._drug_problem_dict) <NEW_LINE> return "<MappingContext RXNORM: %d; treats: %d; drug/problem: %d; 0x%x>" % (rxcount, tscount, dpcount, id(self),) | Packages the information needed to map medications to each other and the
UMLS. | 62599053b5575c28eb713741 |
class Merger(Thread): <NEW_LINE> <INDENT> __metaclass__ = abc.ABCMeta <NEW_LINE> def __init__(self, config, work_queue, pipe): <NEW_LINE> <INDENT> self.config = config <NEW_LINE> self.merge_logger = get_logger( '{name}_merge'.format(name=self.config.name), redirect_to_file=True) <NEW_LINE> self.work_queue = work_queue <NEW_LINE> self.pipe = pipe <NEW_LINE> self.publisher = db_publisher.DBPublisher(name=self.config.name) <NEW_LINE> Thread.__init__(self) <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def run(self): <NEW_LINE> <INDENT> return | Merger is the base class for all mergers.
| 6259905355399d3f05627a0a |
class VideoUploader(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._session = None <NEW_LINE> <DEDENT> def upload(self, video, wait_for_encoding=False): <NEW_LINE> <INDENT> if self._session: <NEW_LINE> <INDENT> raise FacebookError( "There is already an upload session for this video uploader" ) <NEW_LINE> <DEDENT> self._session = VideoUploadSession(video, wait_for_encoding) <NEW_LINE> result = self._session.start() <NEW_LINE> self._session = None <NEW_LINE> return result | Video Uploader that can upload videos to adaccount | 62599053be383301e0254d02 |
class Limit(object): <NEW_LINE> <INDENT> def __init__(self, limit=None): <NEW_LINE> <INDENT> self.data = limit <NEW_LINE> self.stub = Cuebot.getStub('limit') <NEW_LINE> <DEDENT> def create(self): <NEW_LINE> <INDENT> return Limit(self.stub.Create( limit_pb2.LimitCreateRequest(name=self.name(), max_value=self.maxValue()), timeout=Cuebot.Timeout)) <NEW_LINE> <DEDENT> def delete(self): <NEW_LINE> <INDENT> self.stub.Delete(limit_pb2.LimitDeleteRequest(name=self.name()), timeout=Cuebot.Timeout) <NEW_LINE> <DEDENT> def find(self, name): <NEW_LINE> <INDENT> return Limit(self.stub.Find(limit_pb2.LimitFindRequest(name=name), timeout=Cuebot.Timeout).limit) <NEW_LINE> <DEDENT> def get(self, id): <NEW_LINE> <INDENT> return Limit(self.stub.Get(limit_pb2.LimitGetRequest(id=id), timeout=Cuebot.Timeout).limit) <NEW_LINE> <DEDENT> def rename(self, newName): <NEW_LINE> <INDENT> self.stub.Rename(limit_pb2.LimitRenameRequest(old_name=self.name(), new_name=newName), timeout=Cuebot.Timeout) <NEW_LINE> self._update() <NEW_LINE> <DEDENT> def setMaxValue(self, maxValue): <NEW_LINE> <INDENT> self.stub.SetMaxValue(limit_pb2.LimitSetMaxValueRequest(name=self.name(), max_value=maxValue), timeout=Cuebot.Timeout) <NEW_LINE> self._update() <NEW_LINE> <DEDENT> def _update(self): <NEW_LINE> <INDENT> self.data = self.stub.Get(limit_pb2.LimitGetRequest(id=self.id()), timeout=Cuebot.Timeout) <NEW_LINE> <DEDENT> def id(self): <NEW_LINE> <INDENT> return self.data.id <NEW_LINE> <DEDENT> def name(self): <NEW_LINE> <INDENT> if hasattr(self.data, 'name'): <NEW_LINE> <INDENT> return self.data.name <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> <DEDENT> def maxValue(self): <NEW_LINE> <INDENT> if hasattr(self.data, 'max_value'): <NEW_LINE> <INDENT> return self.data.max_value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return -1 <NEW_LINE> <DEDENT> <DEDENT> def currentRunning(self): <NEW_LINE> <INDENT> if hasattr(self.data, 'current_running'): <NEW_LINE> <INDENT> return self.data.current_running <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return -1 | This class contains the grpc implementation related to a Limit. | 62599053596a897236129025 |
class OrganizationAddress(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'organization_addresses' <NEW_LINE> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> organization_id = db.Column(db.ForeignKey( 'organizations.id', ondelete='cascade'), nullable=False) <NEW_LINE> address_id = db.Column(db.ForeignKey( 'addresses.id', ondelete='cascade'), nullable=False) <NEW_LINE> __table_args__ = (UniqueConstraint( 'organization_id', 'address_id', name='_organization_address'),) | link table for organization : n addresses | 6259905394891a1f408ba16c |
class ConfirmFileOverwriteDialog(QDialog): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(ConfirmFileOverwriteDialog, self).__init__(*args, **kwargs) <NEW_LINE> self.setWindowTitle("Overwrite current file") <NEW_LINE> message = "You are trying to open a new file. <br>" "Any existing manipulations of the current <br> data set will be <b>lost</b>." <NEW_LINE> self.label = QLabel(message) <NEW_LINE> btn = QDialogButtonBox.Ok | QDialogButtonBox.Cancel <NEW_LINE> self.buttonBox = QDialogButtonBox(btn) <NEW_LINE> self.buttonBox.accepted.connect(self.accept) <NEW_LINE> self.buttonBox.rejected.connect(self.reject) <NEW_LINE> self.layout = QVBoxLayout() <NEW_LINE> self.layout.addWidget(self.label) <NEW_LINE> self.layout.addWidget(self.buttonBox) <NEW_LINE> self.setLayout(self.layout) <NEW_LINE> w = 300 <NEW_LINE> h = 200 <NEW_LINE> self.setFixedSize(w, h) | Dialog to display when user wants to open a new file if a file is currently open. | 62599053b830903b9686eef3 |
class Rate(object): <NEW_LINE> <INDENT> PERIODS = { 's': 1, 'm': 60, 'h': 60 * 60, 'd': 24 * 60 * 60, } <NEW_LINE> RATE_RE = re.compile(r'(\d+)/(\d*)([smhd])?') <NEW_LINE> @classmethod <NEW_LINE> def parse(cls, rate_str): <NEW_LINE> <INDENT> m = Rate.RATE_RE.match(rate_str) <NEW_LINE> if m: <NEW_LINE> <INDENT> count, multiplier, period = m.groups() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('Could not parse given rate: %s.' % rate_str) <NEW_LINE> <DEDENT> seconds = Rate.PERIODS[period or 's'] <NEW_LINE> if multiplier: <NEW_LINE> <INDENT> seconds *= int(multiplier) <NEW_LINE> <DEDENT> return cls(count=int(count), seconds=seconds) <NEW_LINE> <DEDENT> def __init__(self, count, seconds): <NEW_LINE> <INDENT> self.count = count <NEW_LINE> self.seconds = seconds <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return (isinstance(other, Rate) and self.count == other.count and self.seconds == other.seconds) | A rate representing login attempt frequency.
The main functionality of this class is found in the :py:meth:`parse`
function. This class converts a rate into a Rate object, which
contains the number of login attempts allowed within a time period based
on a given rate string. | 625990534e4d5625663738f3 |
class Server(object): <NEW_LINE> <INDENT> auth = HTTPBasicAuth() <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.process = None <NEW_LINE> self.app = Flask(__name__) <NEW_LINE> self.api = Api(self.app) <NEW_LINE> self.context = ('appqos.crt', 'appqos.key') <NEW_LINE> self.api.add_resource(Apps, '/apps') <NEW_LINE> self.api.add_resource(App, '/apps/<app_id>') <NEW_LINE> self.api.add_resource(Pools, '/pools') <NEW_LINE> self.api.add_resource(Pool, '/pools/<pool_id>') <NEW_LINE> self.api.add_resource(Stats, '/stats') <NEW_LINE> self.api.add_resource(Caps, '/caps') <NEW_LINE> self.app.register_error_handler(RestError, Server.error_handler) <NEW_LINE> <DEDENT> def start(self, host, port, debug=False): <NEW_LINE> <INDENT> for ssl_ctx_file in self.context: <NEW_LINE> <INDENT> if not os.path.isfile(ssl_ctx_file): <NEW_LINE> <INDENT> log.error("SSL cert or key file missing.") <NEW_LINE> return -1 <NEW_LINE> <DEDENT> <DEDENT> self.process = multiprocessing.Process(target=self.app.run, kwargs={'host': host, 'port': port, 'ssl_context': self.context, 'debug': debug, 'use_reloader': False, 'processes': 1}) <NEW_LINE> self.process.start() <NEW_LINE> return 0 <NEW_LINE> <DEDENT> def terminate(self): <NEW_LINE> <INDENT> os.kill(self.process.pid, signal.SIGINT) <NEW_LINE> sleep(1) <NEW_LINE> if self.process.is_alive(): <NEW_LINE> <INDENT> self.process.terminate() <NEW_LINE> <DEDENT> self.process.join() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def error_handler(error): <NEW_LINE> <INDENT> response = {"message": error.message} <NEW_LINE> return json.dumps(response), error.code <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> @auth.verify_password <NEW_LINE> def verify(username, password): <NEW_LINE> <INDENT> if not (username and password): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if 'auth' in common.CONFIG_STORE.get_config(): <NEW_LINE> <INDENT> if username == common.CONFIG_STORE.get_config()['auth']['username'] and password == common.CONFIG_STORE.get_config()['auth']['password']: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False | REST API server | 6259905376d4e153a661dcf1 |
class UniqueFileTest(test_util.TempDirTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(UniqueFileTest, self).setUp() <NEW_LINE> self.default_name = os.path.join(self.tempdir, "foo.txt") <NEW_LINE> <DEDENT> def _call(self, mode=0o600): <NEW_LINE> <INDENT> from certbot.util import unique_file <NEW_LINE> return unique_file(self.default_name, mode) <NEW_LINE> <DEDENT> def test_returns_fd_for_writing(self): <NEW_LINE> <INDENT> fd, name = self._call() <NEW_LINE> fd.write("bar") <NEW_LINE> fd.close() <NEW_LINE> with open(name) as f: <NEW_LINE> <INDENT> self.assertEqual(f.read(), "bar") <NEW_LINE> <DEDENT> <DEDENT> def test_right_mode(self): <NEW_LINE> <INDENT> fd1, name1 = self._call(0o700) <NEW_LINE> fd2, name2 = self._call(0o600) <NEW_LINE> self.assertTrue(compat.compare_file_modes(0o700, os.stat(name1).st_mode)) <NEW_LINE> self.assertTrue(compat.compare_file_modes(0o600, os.stat(name2).st_mode)) <NEW_LINE> fd1.close() <NEW_LINE> fd2.close() <NEW_LINE> <DEDENT> def test_default_exists(self): <NEW_LINE> <INDENT> fd1, name1 = self._call() <NEW_LINE> fd2, name2 = self._call() <NEW_LINE> fd3, name3 = self._call() <NEW_LINE> self.assertNotEqual(name1, name2) <NEW_LINE> self.assertNotEqual(name1, name3) <NEW_LINE> self.assertNotEqual(name2, name3) <NEW_LINE> self.assertEqual(os.path.dirname(name1), self.tempdir) <NEW_LINE> self.assertEqual(os.path.dirname(name2), self.tempdir) <NEW_LINE> self.assertEqual(os.path.dirname(name3), self.tempdir) <NEW_LINE> basename1 = os.path.basename(name2) <NEW_LINE> self.assertTrue(basename1.endswith("foo.txt")) <NEW_LINE> basename2 = os.path.basename(name2) <NEW_LINE> self.assertTrue(basename2.endswith("foo.txt")) <NEW_LINE> basename3 = os.path.basename(name3) <NEW_LINE> self.assertTrue(basename3.endswith("foo.txt")) <NEW_LINE> fd1.close() <NEW_LINE> fd2.close() <NEW_LINE> fd3.close() | Tests for certbot.util.unique_file. | 62599053b7558d58954649a0 |
class enable_options_from_developer_options( parent_ui_steps.enable_options_from_developer_options): <NEW_LINE> <INDENT> pass | description:
enables an option from developer options
if <enabled> parameter is True, <Developer options> is enabled
usage:
ui_steps.enable_options_from_developer_options(developer_options =
["Verify apps over USB"])()
tags:
ui, android, enable, developer options | 6259905321a7993f00c6745a |
class User(flask_restful.Resource): <NEW_LINE> <INDENT> SCHEMA_POST = { "type": "object", "properties": { "bio": {"type": "string"}, "username": {"type": "string"}, "password": {"type": "string"}, }, "required": ["username", "password"], } <NEW_LINE> @limiter.limit(config.LIMITS_USER_GET) <NEW_LINE> def get(self, user_id=None, username=None): <NEW_LINE> <INDENT> if user_id is not None: <NEW_LINE> <INDENT> user = db.session.query(models.User).get(user_id) <NEW_LINE> if user is None: <NEW_LINE> <INDENT> message = "No user matching ID: %s" % user_id <NEW_LINE> flask_restful.abort(404, message=message) <NEW_LINE> <DEDENT> <DEDENT> elif username is not None: <NEW_LINE> <INDENT> user = (db.session.query(models.User) .filter(models.User.username == username).first()) <NEW_LINE> if user is None: <NEW_LINE> <INDENT> message = "No user matching username: %s" % username <NEW_LINE> flask_restful.abort(404, message=message) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> message = "Must specify user_id or username." <NEW_LINE> flask_restful.abort(400, message=message) <NEW_LINE> <DEDENT> return user.to_dict() <NEW_LINE> <DEDENT> @limiter.limit(config.LIMITS_USER_POST) <NEW_LINE> def post(self): <NEW_LINE> <INDENT> json_data = get_valid_json(self.SCHEMA_POST) <NEW_LINE> bio = json_data.get('bio') <NEW_LINE> username = json_data['username'] <NEW_LINE> password = json_data['password'] <NEW_LINE> new_user = models.User(username, password, bio=bio) <NEW_LINE> db.session.add(new_user) <NEW_LINE> try: <NEW_LINE> <INDENT> db.session.commit() <NEW_LINE> <DEDENT> except sqlalchemy.exc.IntegrityError: <NEW_LINE> <INDENT> message = "A user already exists with username: %s" % username <NEW_LINE> flask_restful.abort(400, message=message) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return new_user.to_dict() | User account resource; manage users
in the system. | 6259905382261d6c52730940 |
class GammatonegramTester: <NEW_LINE> <INDENT> def __init__(self, name, args, sig, erb_fb_out, expected): <NEW_LINE> <INDENT> self.signal = np.asarray(sig) <NEW_LINE> self.expected = np.asarray(expected) <NEW_LINE> self.erb_fb_out = np.asarray(erb_fb_out) <NEW_LINE> self.args = args <NEW_LINE> self.description = "Gammatonegram for {:s}".format(name) <NEW_LINE> <DEDENT> def __call__(self): <NEW_LINE> <INDENT> with patch( 'gammatone.gtgram.erb_filterbank', return_value=self.erb_fb_out): <NEW_LINE> <INDENT> result = gammatone.gtgram.gtgram(self.signal, *self.args) <NEW_LINE> max_diff = np.max(np.abs(result - self.expected)) <NEW_LINE> diagnostic = "Maximum difference: {:6e}".format(max_diff) <NEW_LINE> assert np.allclose(result, self.expected, rtol=1e-6, atol=1e-12), diagnostic | Testing class for gammatonegram calculation | 625990532ae34c7f260ac5d3 |
@method_decorator(login_required, name='dispatch') <NEW_LINE> class CreateEntrega(FormView): <NEW_LINE> <INDENT> form_class = EntregaForm <NEW_LINE> template_name = 'delivery_helper_app/entrega_create.html' <NEW_LINE> def get_form_kwargs(self): <NEW_LINE> <INDENT> kwargs = super(CreateEntrega, self).get_form_kwargs() <NEW_LINE> kwargs['usuario'] = self.request.user.usuario_set.all().get() <NEW_LINE> return kwargs <NEW_LINE> <DEDENT> def form_invalid(self, form): <NEW_LINE> <INDENT> error(self.request, u"Errores en el formulario.") <NEW_LINE> return super(CreateEntrega, self).form_invalid(form) <NEW_LINE> <DEDENT> def form_valid(self, form): <NEW_LINE> <INDENT> form.save(commit=False) <NEW_LINE> entrega = form.instance <NEW_LINE> entrega.usuario = self.request.user.usuario_set.all().get() <NEW_LINE> entrega.save() <NEW_LINE> success(self.request, u"Entrega %s registrada con éxito." % entrega) <NEW_LINE> if entrega.esta_en_reparto(): <NEW_LINE> <INDENT> success(self.request, u"La entrega %s está en reparto." % entrega) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> warning(self.request, u"No se puede determinar si la entrega %s está ya en reparto." % entrega) <NEW_LINE> <DEDENT> return HttpResponseRedirect(reverse_lazy('entregaslist')) | Create Entrega view. | 6259905307d97122c4218197 |
class sublime_plugin(object): <NEW_LINE> <INDENT> all_callbacks = { 'on_load': [] } <NEW_LINE> '''Classes''' <NEW_LINE> class WindowCommand(object): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> class TextCommand(object): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> class EventListener(object): <NEW_LINE> <INDENT> pass | Constants | 625990532ae34c7f260ac5d4 |
class SyncRecipeCreator(SingleOutputRecipeCreator): <NEW_LINE> <INDENT> def __init__(self, name, project): <NEW_LINE> <INDENT> SingleOutputRecipeCreator.__init__(self, 'sync', name, project) | Create a Sync recipe | 62599053097d151d1a2c2564 |
class User(ndb.Model): <NEW_LINE> <INDENT> name = ndb.StringProperty(required=True) <NEW_LINE> email = ndb.StringProperty() <NEW_LINE> date_created = ndb.DateTimeProperty(auto_now_add=True) <NEW_LINE> @classmethod <NEW_LINE> def create_user(cls, user_name, email): <NEW_LINE> <INDENT> p_key = ndb.Key(User, user_name) <NEW_LINE> user = User(key=p_key, name=user_name, email=email) <NEW_LINE> user.put() <NEW_LINE> return user; | User profile | 62599053596a897236129026 |
class Qubit: <NEW_LINE> <INDENT> def __init__(self, label_circuit="", label_physical=""): <NEW_LINE> <INDENT> self.label_circuit = label_circuit <NEW_LINE> self.label_physical = label_physical <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "[Circuit={}, Physical={}]".format(self.label_circuit, self.label_physical) | Qubit class. Even though this class contains no method, this class
will become handy in recursively generating DMERA ansatz at different
scales. | 625990531f037a2d8b9e52e4 |
class CompleteMultiPartUpload(object): <NEW_LINE> <INDENT> def __init__(self, bucket=None): <NEW_LINE> <INDENT> self.bucket = bucket <NEW_LINE> self.location = None <NEW_LINE> self.bucket_name = None <NEW_LINE> self.key_name = None <NEW_LINE> self.etag = None <NEW_LINE> self.version_id = None <NEW_LINE> self.encrypted = None <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<CompleteMultiPartUpload: %s.%s>' % (self.bucket_name, self.key_name) <NEW_LINE> <DEDENT> def startElement(self, name, attrs, connection): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def endElement(self, name, value, connection): <NEW_LINE> <INDENT> if name.startswith("ns2"): <NEW_LINE> <INDENT> name = name.split(":")[1] <NEW_LINE> <DEDENT> if name == 'Location': <NEW_LINE> <INDENT> self.location = value <NEW_LINE> <DEDENT> elif name == 'Bucket': <NEW_LINE> <INDENT> self.bucket_name = value <NEW_LINE> <DEDENT> elif name == 'Key': <NEW_LINE> <INDENT> self.key_name = value <NEW_LINE> <DEDENT> elif name == 'ETag': <NEW_LINE> <INDENT> self.etag = value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> setattr(self, name, value) | Represents a completed MultiPart Upload. Contains the
following useful attributes:
* location - The URI of the completed upload
* bucket_name - The name of the bucket in which the upload
is contained
* key_name - The name of the new, completed key
* etag - The MD5 hash of the completed, combined upload
* version_id - The version_id of the completed upload
* encrypted - The value of the encryption header | 62599053b830903b9686eef4 |
class Help(SidebarBase): <NEW_LINE> <INDENT> shorthand = 'help' <NEW_LINE> def context(self, http_client, request): <NEW_LINE> <INDENT> subtemplates = [] <NEW_LINE> for layer_name in sorted(layer_names(request)): <NEW_LINE> <INDENT> template_name = 'regulations/sidebar/help/{}.html'.format( layer_name) <NEW_LINE> try: <NEW_LINE> <INDENT> template = get_template(template_name) <NEW_LINE> subtemplates.append(template.render( {'cfr_part': self.cfr_part})) <NEW_LINE> <DEDENT> except TemplateDoesNotExist: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> return {'subtemplates': subtemplates} | Help info; composed of subtemplates defined by the active layers | 625990538e71fb1e983bcfb8 |
class NodeStmt(Node): <NEW_LINE> <INDENT> def __init__(self, node): <NEW_LINE> <INDENT> super(NodeStmt, self).__init__() <NEW_LINE> self.node = node <NEW_LINE> <DEDENT> def eval(self, env): <NEW_LINE> <INDENT> return self.node.eval(env) | generated source for class NodeStmt | 625990534e4d5625663738f6 |
class RunResult: <NEW_LINE> <INDENT> def __init__(self, command: List[str], stdout: bytes, stderr: bytes, return_code: int, piped_out: bool, piped_err: bool) -> None: <NEW_LINE> <INDENT> self.command = command <NEW_LINE> self.stdout_piped = piped_out <NEW_LINE> self.stderr_piped = piped_err <NEW_LINE> if piped_out: <NEW_LINE> <INDENT> self.stdout = stdout.decode() <NEW_LINE> <DEDENT> if piped_err: <NEW_LINE> <INDENT> self.stderr = stderr.decode() <NEW_LINE> <DEDENT> self.return_code = return_code <NEW_LINE> <DEDENT> def __getattribute__(self, attr: str) -> Union[bool, str, int]: <NEW_LINE> <INDENT> if attr == 'stdout' and not self.stdout_piped: <NEW_LINE> <INDENT> raise ValueError("stdout was redirected to file, unable to access") <NEW_LINE> <DEDENT> if attr == 'stderr' and not self.stderr_piped: <NEW_LINE> <INDENT> raise ValueError("stderr was redirected to file, unable to access") <NEW_LINE> <DEDENT> return super().__getattribute__(attr) <NEW_LINE> <DEDENT> def successful(self) -> bool: <NEW_LINE> <INDENT> return not self.return_code <NEW_LINE> <DEDENT> def get_command_string(self) -> str: <NEW_LINE> <INDENT> return " ".join(self.command) | A container for simplifying the results of running a command | 625990530c0af96317c577d7 |
class CrossRefs(object): <NEW_LINE> <INDENT> InterestingXRef = { "GO", "HAMAP", "InterPro", "Gene3D", "SUPFAM", "PANTHER", "Pfam", "PIRSF", "PRINTS", "ProDom", "SMART", "TIGRFAMs", "PROSITE", } | Class listing the types of cross references to be considered. | 62599053d99f1b3c44d06b8e |
class Solution: <NEW_LINE> <INDENT> def lengthOfLongestSubstring(self, s): <NEW_LINE> <INDENT> seen = {} <NEW_LINE> ret = char_to_start = 0 <NEW_LINE> for idx, context in enumerate(s): <NEW_LINE> <INDENT> if context in seen and char_to_start <= seen[context]: <NEW_LINE> <INDENT> char_to_start = seen[context] + 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ret = max(ret, idx - char_to_start + 1) <NEW_LINE> <DEDENT> seen[context] = idx <NEW_LINE> <DEDENT> return ret | def lengthOfLongestSubstring(self, s):
if len(s) == 0:
return 0
ret = 1
i = 0
while i < len(s) - 1:
j = i + 1
uni_set = set()
uni_set.add(s[i])
i += 1
while j < len(s):
if s[j] not in uni_set:
uni_set.add(s[j])
j += 1
else:
break
temp = len(uni_set)
if temp >= ret:
ret = temp
return ret | 625990537d847024c075d8c9 |
class TestIntegerPropertyDefinitionResource(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testIntegerPropertyDefinitionResource(self): <NEW_LINE> <INDENT> pass | IntegerPropertyDefinitionResource unit test stubs | 62599053287bf620b62730de |
class GetTeamInfo(REST): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(GetTeamInfo, self).__init__('getteaminfo.do', 3.0, **kwargs) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get(self, **args): <NEW_LINE> <INDENT> return self().GET(args, format='text') | class: veracode.API.admin.GetTeamInfo
params: dynamic, see veracode.SDK.admin.GetTeamInfo for more info
returns: XML data from veracode API | 62599053435de62698e9d2f1 |
class armAnimation: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.flagInit = True <NEW_LINE> self.fig, self.ax = plt.subplots() <NEW_LINE> self.handle = [] <NEW_LINE> self.length=P.length <NEW_LINE> self.width=P.width <NEW_LINE> plt.axis([-2.0*P.length, 2.0*P.length, -2.0*P.length, 2.0*P.length]) <NEW_LINE> plt.plot([0, P.length], [0, 0],'k--') <NEW_LINE> <DEDENT> def drawArm(self, u): <NEW_LINE> <INDENT> theta = u[0] <NEW_LINE> X = [0, self.length*np.cos(theta)] <NEW_LINE> Y = [0, self.length*np.sin(theta)] <NEW_LINE> if self.flagInit == True: <NEW_LINE> <INDENT> line, =self.ax.plot(X, Y, lw=5, c='blue') <NEW_LINE> self.handle.append(line) <NEW_LINE> self.flagInit=False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.handle[0].set_xdata(X) <NEW_LINE> self.handle[0].set_ydata(Y) | Create arm animation | 6259905330dc7b76659a0cf6 |
class syncContacts_result(object): <NEW_LINE> <INDENT> def __init__(self, success=None, e=None,): <NEW_LINE> <INDENT> self.success = success <NEW_LINE> self.e = e <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: <NEW_LINE> <INDENT> iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 0: <NEW_LINE> <INDENT> if ftype == TType.MAP: <NEW_LINE> <INDENT> self.success = {} <NEW_LINE> (_ktype299, _vtype300, _size298) = iprot.readMapBegin() <NEW_LINE> for _i302 in range(_size298): <NEW_LINE> <INDENT> _key303 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() <NEW_LINE> _val304 = ContactRegistration() <NEW_LINE> _val304.read(iprot) <NEW_LINE> self.success[_key303] = _val304 <NEW_LINE> <DEDENT> iprot.readMapEnd() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 1: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.e = TalkException() <NEW_LINE> self.e.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot._fast_encode is not None and self.thrift_spec is not None: <NEW_LINE> <INDENT> oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('syncContacts_result') <NEW_LINE> if self.success is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('success', TType.MAP, 0) <NEW_LINE> oprot.writeMapBegin(TType.STRING, TType.STRUCT, len(self.success)) <NEW_LINE> for kiter305, viter306 in self.success.items(): <NEW_LINE> <INDENT> oprot.writeString(kiter305.encode('utf-8') if sys.version_info[0] == 2 else kiter305) <NEW_LINE> viter306.write(oprot) <NEW_LINE> <DEDENT> oprot.writeMapEnd() <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.e is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('e', TType.STRUCT, 1) <NEW_LINE> self.e.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- success
- e | 6259905307f4c71912bb0928 |
class Winch(Subsystem): <NEW_LINE> <INDENT> def __init__(self, robot): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.robot = robot <NEW_LINE> self.motor = wpilib.Jaguar(4) <NEW_LINE> <DEDENT> def initDefaultCommand(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def manualSet(self, output): <NEW_LINE> <INDENT> self.motor.set(output*.85) <NEW_LINE> <DEDENT> def log(self): <NEW_LINE> <INDENT> pass | Runs the winch. | 625990532ae34c7f260ac5d5 |
class Index(Resource): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> return success(success_msg['landing_page']) | Root endpoint | 6259905373bcbd0ca4bcb77f |
class TokenizationError(Exception): <NEW_LINE> <INDENT> def __init__(self, message, line, position): <NEW_LINE> <INDENT> Exception.__init__(self, message) <NEW_LINE> self.message = message <NEW_LINE> self.line = line <NEW_LINE> self.position = position <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.message | Raised when a problem tokenizing text is encountered | 62599053b57a9660fecd2f6a |
class test_hophdr_keepalive_toClt(ProxyTest): <NEW_LINE> <INDENT> def test_hophdr_keepalive_toClt(self): <NEW_LINE> <INDENT> self.start_test() <NEW_LINE> <DEDENT> def get_response_headers(self, content): <NEW_LINE> <INDENT> headers = super(test_hophdr_keepalive_toClt, self).get_response_headers(content) <NEW_LINE> headers.append("Keep-Alive: foo") <NEW_LINE> return headers <NEW_LINE> <DEDENT> def check_response_headers(self, response): <NEW_LINE> <INDENT> if response.has_header("Keep-Alive"): <NEW_LINE> <INDENT> self.assertNotEqual(response.get_header("Keep-Alive"), "foo") | Proxy must not forward Keep-Alive: response header | 62599053ac7a0e7691f739d0 |
class TestFeed(TestSetupMixin, object): <NEW_LINE> <INDENT> def test_accept_entry(self): <NEW_LINE> <INDENT> assert self.feed._accept_entry(GOOD_FEED_ENTRY) is True <NEW_LINE> assert self.feed._accept_entry(FOOBAR_FEED_ENTRY) is False <NEW_LINE> assert self.feed._accept_entry(STALE_FEED_ENTRY) is False <NEW_LINE> <DEDENT> @patch('feedbot.bot.Feed.get_raw_feed') <NEW_LINE> def test_get_filtered_stream(self, feed): <NEW_LINE> <INDENT> feed.return_value = FeedParserDict({'entries': [GOOD_FEED_ENTRY, FOOBAR_FEED_ENTRY, STALE_FEED_ENTRY]}) <NEW_LINE> assert self.feed.get_filtered_feed() == [GOOD_FEED_ENTRY] <NEW_LINE> <DEDENT> @patch('feedbot.bot.Feed.get_raw_feed') <NEW_LINE> def test_get_filtered_stream_raises_stream_error(self, feed): <NEW_LINE> <INDENT> feed.return_value = FeedParserDict() <NEW_LINE> with pytest.raises(FeedDataError): <NEW_LINE> <INDENT> self.feed.get_filtered_feed() <NEW_LINE> <DEDENT> <DEDENT> def test_add_filter(self): <NEW_LINE> <INDENT> number_of_filters = len(self.feed.get_filters()) <NEW_LINE> new_filter = NotFilter("bad juju") <NEW_LINE> self.feed.add_filter(new_filter) <NEW_LINE> assert len(self.feed.get_filters()) == number_of_filters + 1 <NEW_LINE> assert new_filter in self.feed.get_filters() <NEW_LINE> <DEDENT> def test_remove_filter(self): <NEW_LINE> <INDENT> number_of_filters = len(self.feed.get_filters()) <NEW_LINE> self.feed.remove_filter(self.age_filter) <NEW_LINE> assert self.age_filter not in self.feed.get_filters() <NEW_LINE> assert len(self.feed.get_filters()) == number_of_filters - 1 <NEW_LINE> assert self.not_filter in self.feed.get_filters() <NEW_LINE> <DEDENT> def test_get_filters(self): <NEW_LINE> <INDENT> assert self.feed.get_filters() == self.filters <NEW_LINE> <DEDENT> def test_get_filter_by_key(self): <NEW_LINE> <INDENT> for index, feed_filter in enumerate(self.filters): <NEW_LINE> <INDENT> assert self.feed.get_filter_by_key(index) == feed_filter | Tests for the feedbot Feed class. | 62599053379a373c97d9a515 |
class TestDocCode(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testDocCode(self): <NEW_LINE> <INDENT> model = swagger_client.models.doc_code.DocCode() | DocCode unit test stubs | 62599053b830903b9686eef5 |
class StorageProfiles(Task): <NEW_LINE> <INDENT> def __init__(self, datalab): <NEW_LINE> <INDENT> Task.__init__(self, datalab, 'list_storage_profiles', 'List the available Storage Manager profiles') <NEW_LINE> self.addOption("profile", Option("profile", "", "Profile to list", required=False, default=None)) <NEW_LINE> self.addOption("format", Option("format", "", "Output format (csv|text)", required=False, default='text')) <NEW_LINE> self.addStdOptions() <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> token = getUserToken(self) <NEW_LINE> print (str(storeClient.list_profiles (token, profile=self.profile.value, format=self.format.value))) | List the available Storage Manager profiles. | 62599053009cb60464d02a2e |
class TestCase(NdParentTest): <NEW_LINE> <INDENT> def run_test(self, redant): <NEW_LINE> <INDENT> redant.set_volume_options(self.vol_name, {'storage.reserve': '50'}, self.server_list[0]) <NEW_LINE> redant.validate_volume_option(self.vol_name, {'storage.reserve': '50'}, self.server_list[0]) <NEW_LINE> redant.reset_volume_option(self.vol_name, 'storage.reserve', self.server_list[0]) <NEW_LINE> redant.validate_volume_option(self.vol_name, {'storage.reserve': '1'}, self.server_list[0]) | Testing set and reset of Reserve limit in GlusterD | 6259905355399d3f05627a0e |
class Entity(object): <NEW_LINE> <INDENT> parameters = list() <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> if not hasattr(self, 'values'): <NEW_LINE> <INDENT> self.values = dict() <NEW_LINE> <DEDENT> for field, field_type, in self.parameters: <NEW_LINE> <INDENT> if field in kwargs: <NEW_LINE> <INDENT> self.values[field] = kwargs[field] <NEW_LINE> del kwargs[field] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.values[field] = field_type.default() <NEW_LINE> <DEDENT> <DEDENT> super(Entity, self).__init__(**kwargs) <NEW_LINE> self.data = None <NEW_LINE> if hasattr(self, 'init'): <NEW_LINE> <INDENT> getattr(self, 'init')() <NEW_LINE> <DEDENT> <DEDENT> def player_data(self): <NEW_LINE> <INDENT> result = list() <NEW_LINE> for field, field_type, in self.parameters: <NEW_LINE> <INDENT> result.extend(field_type.get_player_data(self.values[field])) <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def set_data(self, data): <NEW_LINE> <INDENT> self.data = data <NEW_LINE> for field, field_type, in self.parameters: <NEW_LINE> <INDENT> field_type.register_data(self.values[field], data) <NEW_LINE> <DEDENT> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = { 'name' : self.name, 'library' : self.library, } <NEW_LINE> for field, field_type in self.parameters: <NEW_LINE> <INDENT> result[field] = field_type.value_to_dict(self.values[field]) <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_dict(cls, rules): <NEW_LINE> <INDENT> if not hasattr(cls, 'entities'): <NEW_LINE> <INDENT> cls.entities = cls.list_entities() <NEW_LINE> <DEDENT> entity = cls.entities.get( rules['library'], dict()).get(rules['name'], None) <NEW_LINE> if entity is None: <NEW_LINE> <INDENT> raise NonExistingEntity(rules['library'], rules['name']) <NEW_LINE> <DEDENT> del rules['library'] <NEW_LINE> del rules['name'] <NEW_LINE> for field, field_type in entity.parameters: <NEW_LINE> <INDENT> rules[field] = field_type.value_from_dict(rules[field]) <NEW_LINE> <DEDENT> return entity(**rules) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def list_entities(cls): <NEW_LINE> <INDENT> subs = classes.list_subclasses(cls, __name__, __path__) <NEW_LINE> result = collections.defaultdict(dict) <NEW_LINE> for sub in subs: <NEW_LINE> <INDENT> result[sub.library][sub.name] = sub <NEW_LINE> <DEDENT> return result | Base class for a rule
| 6259905324f1403a92686347 |
class RegisterTokensTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def test_register_token(self): <NEW_LINE> <INDENT> class Token(tdparser.Token): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> lexer = tdparser.Lexer() <NEW_LINE> self.assertEqual(0, len(lexer.tokens)) <NEW_LINE> lexer.register_token(Token, r'a') <NEW_LINE> self.assertEqual(1, len(lexer.tokens)) <NEW_LINE> <DEDENT> def test_register_token_re(self): <NEW_LINE> <INDENT> class Token(tdparser.Token): <NEW_LINE> <INDENT> regexp = r'a' <NEW_LINE> <DEDENT> lexer = tdparser.Lexer() <NEW_LINE> self.assertEqual(0, len(lexer.tokens)) <NEW_LINE> lexer.register_token(Token) <NEW_LINE> self.assertEqual(1, len(lexer.tokens)) <NEW_LINE> <DEDENT> def test_register_token_override_regexp(self): <NEW_LINE> <INDENT> class Token(tdparser.Token): <NEW_LINE> <INDENT> regexp = r'a' <NEW_LINE> <DEDENT> lexer = tdparser.Lexer() <NEW_LINE> self.assertEqual(0, len(lexer.tokens)) <NEW_LINE> lexer.register_token(Token, r'b') <NEW_LINE> self.assertEqual(1, len(lexer.tokens)) <NEW_LINE> token_class, match = lexer.tokens.get_token('a') <NEW_LINE> self.assertIsNone(token_class) <NEW_LINE> self.assertIsNone(match) <NEW_LINE> token_class, match = lexer.tokens.get_token('b') <NEW_LINE> self.assertEqual(Token, token_class) <NEW_LINE> self.assertIsNotNone(match) <NEW_LINE> <DEDENT> def test_register_tokens(self): <NEW_LINE> <INDENT> class AToken(tdparser.Token): <NEW_LINE> <INDENT> regexp = r'a' <NEW_LINE> <DEDENT> class BToken(tdparser.Token): <NEW_LINE> <INDENT> regexp = r'b' <NEW_LINE> <DEDENT> lexer = tdparser.Lexer() <NEW_LINE> self.assertEqual(0, len(lexer.tokens)) <NEW_LINE> lexer.register_tokens(AToken, BToken) <NEW_LINE> self.assertEqual(2, len(lexer.tokens)) <NEW_LINE> token_class, match = lexer.tokens.get_token('a') <NEW_LINE> self.assertEqual(AToken, token_class) <NEW_LINE> self.assertIsNotNone(match) <NEW_LINE> token_class, match = lexer.tokens.get_token('b') <NEW_LINE> self.assertEqual(BToken, token_class) <NEW_LINE> self.assertIsNotNone(match) | Tests for Lexer.register_token / Lexer.register_tokens. | 625990531f037a2d8b9e52e5 |
class CsapschannelprotocolEnum(Enum): <NEW_LINE> <INDENT> bellcore = 1 <NEW_LINE> itu = 2 <NEW_LINE> @staticmethod <NEW_LINE> def _meta_info(): <NEW_LINE> <INDENT> from ydk.models.cisco_ios_xe._meta import _CISCO_SONET_MIB as meta <NEW_LINE> return meta._meta_table['CiscoSonetMib.Csapsconfigtable.Csapsconfigentry.CsapschannelprotocolEnum'] | CsapschannelprotocolEnum
This object allows to configure APS channel protocol to
be implemented at Near End terminal.
K1 and K2 overhead bytes in a SONET signal are used as
an APS channel.
This channel is used to carry APS protocol.
Possible values\:
bellcore(1) \: Implements APS channel protocol as defined
in bellcore document GR\-253\-CORE.
itu(2) \: Implements APS channel protocol as defined in
ITU document G.783.
.. data:: bellcore = 1
.. data:: itu = 2 | 62599053be383301e0254d04 |
class NoodleEditGroup(bpy.types.Operator): <NEW_LINE> <INDENT> bl_idname = "node."+APIPRE+"_edit_group" <NEW_LINE> bl_label = "Edit Group ("+NODETREE_EDITOR_NAME+")" <NEW_LINE> node_path : bpy.props.StringProperty(name="node_path") <NEW_LINE> @classmethod <NEW_LINE> def poll(cls, context): <NEW_LINE> <INDENT> return True <NEW_LINE> space = context.space_data <NEW_LINE> if space.type != "NODE_EDITOR": <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> ntree = space.node_tree <NEW_LINE> if ntree == undefined: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if ntree.bl_idname != NODETREE_TYPE: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> def execute(self, context): <NEW_LINE> <INDENT> space = context.space_data <NEW_LINE> ntree = space.node_tree <NEW_LINE> active = context.active_node <NEW_LINE> if self.node_path != None: <NEW_LINE> <INDENT> print(self.node_path) <NEW_LINE> node = utils.get_node_from_path(self.node_path) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> node = active <NEW_LINE> <DEDENT> space.path.append(node.node_tree) <NEW_LINE> return {'FINISHED'} | Make newgroup node from selected nodes | 625990534a966d76dd5f03df |
class PornImgReviewTemplateInfoForUpdate(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Switch = None <NEW_LINE> self.LabelSet = None <NEW_LINE> self.BlockConfidence = None <NEW_LINE> self.ReviewConfidence = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.Switch = params.get("Switch") <NEW_LINE> self.LabelSet = params.get("LabelSet") <NEW_LINE> self.BlockConfidence = params.get("BlockConfidence") <NEW_LINE> self.ReviewConfidence = params.get("ReviewConfidence") <NEW_LINE> memeber_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in memeber_set: <NEW_LINE> <INDENT> memeber_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(memeber_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fileds are useless." % ",".join(memeber_set)) | Control parameters for the task of detecting pornographic or objectionable content in images.
| 6259905391af0d3eaad3b319 |
class ComputedStringField(fields.StringField): <NEW_LINE> <INDENT> def populate_obj(self, obj, name): <NEW_LINE> <INDENT> pass | Use on computed fields and forms with mixins to skip populate_obj. | 6259905338b623060ffaa2c7 |
class Todo: <NEW_LINE> <INDENT> title = 'My Todo' <NEW_LINE> tasks = [] <NEW_LINE> def __init__(self, title) -> None: <NEW_LINE> <INDENT> self.title = title <NEW_LINE> pass <NEW_LINE> <DEDENT> def rename(self, newTitle): <NEW_LINE> <INDENT> self.title = newTitle <NEW_LINE> return <NEW_LINE> <DEDENT> def addTask(self, description): <NEW_LINE> <INDENT> self.tasks.append(Task(description)) <NEW_LINE> <DEDENT> def deleteTask(self, index): <NEW_LINE> <INDENT> del self.tasks[index] <NEW_LINE> return <NEW_LINE> <DEDENT> def checkTask(self, index): <NEW_LINE> <INDENT> self.tasks[index].check() <NEW_LINE> return <NEW_LINE> <DEDENT> def rewriteTask(self, index, description): <NEW_LINE> <INDENT> self.tasks[index].rewrite(description) | List of tasks to complete | 6259905345492302aabfd9c8 |
class HTTPSConnection(http_client.HTTPSConnection): <NEW_LINE> <INDENT> def is_local(self): <NEW_LINE> <INDENT> if self.sock is None: <NEW_LINE> <INDENT> self.connect() <NEW_LINE> <DEDENT> return self.sock.getsockname()[0] == self.sock.getpeername()[0] <NEW_LINE> <DEDENT> @property <NEW_LINE> def server_address(self): <NEW_LINE> <INDENT> return self.sock.getpeername()[:2] | Enhanced HTTPS connection. | 625990538da39b475be046db |
class RatelimitMismatchError(ConfigurationError): <NEW_LINE> <INDENT> pass | Raise when validating ratelimit (configured <=> api_response.headers) fails. | 6259905399cbb53fe68323db |
class Cat: <NEW_LINE> <INDENT> def __init__(self,name,age): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.age = age <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "%s的年龄是:%d岁"%(self.name,self.age) <NEW_LINE> <DEDENT> def eat(self): <NEW_LINE> <INDENT> print("%s在吃鱼..."%self.name) <NEW_LINE> <DEDENT> def catchMouse(self): <NEW_LINE> <INDENT> print("%s第一次抓老鼠是在%d岁的时候"%(self.name,self.age)) | Define a Cat class | 625990530a50d4780f706837 |
class ProductionConfig(Config): <NEW_LINE> <INDENT> pass | Production Config | 62599053b5575c28eb713744 |
class HTML: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def send(bot, user_id, content): <NEW_LINE> <INDENT> bot.send_message( chat_id=user_id, text=content, parse_mode='HTML' ) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_content(message): <NEW_LINE> <INDENT> return message.text_html | Wrapper class for handling messages with formatting entities | 62599053d486a94d0ba2d4ba |
class Algorithm(object): <NEW_LINE> <INDENT> def __init__(self, date, data_dir): <NEW_LINE> <INDENT> self.date = date <NEW_LINE> self.data_dir = data_dir <NEW_LINE> self.results = {} <NEW_LINE> try: <NEW_LINE> <INDENT> self.player_data = PlayerData(self.date, self.data_dir) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'Base class algorithm for {} with data from {} '.format(self.date, self.data_dir) <NEW_LINE> <DEDENT> def get_top_picks(self, number): <NEW_LINE> <INDENT> return self.results[:number] | Base class that can be overridden with the specifics of an algorithm
but also provides basic interfaces for calling by wrapper and infrastructure code | 625990532ae34c7f260ac5d8 |
class Warning(Exception): <NEW_LINE> <INDENT> def __init__(self, message): <NEW_LINE> <INDENT> self.message = message | Exception raised for important warnings like data truncations
while inserting, etc. | 6259905326068e7796d4de39 |
class GetDigest(APIView): <NEW_LINE> <INDENT> permission_classes = (IsAuthenticated,) <NEW_LINE> mandatory_fields = ('site', 'begin_date', 'end_date') <NEW_LINE> def get(self, request): <NEW_LINE> <INDENT> params = request.GET.dict() <NEW_LINE> for key in self.mandatory_fields: <NEW_LINE> <INDENT> if key not in params.keys(): <NEW_LINE> <INDENT> raise ValidationError('Mandatory field: "%s" is missing' % key) <NEW_LINE> <DEDENT> <DEDENT> try: <NEW_LINE> <INDENT> site = Site.objects.get(pk=params['site']) <NEW_LINE> <DEDENT> except Site.DoesNotExist as ex: <NEW_LINE> <INDENT> raise NotFound(ex.message) <NEW_LINE> <DEDENT> if site.user != request.user: <NEW_LINE> <INDENT> raise PermissionDenied() <NEW_LINE> <DEDENT> begin_date = datetime.fromtimestamp(int(params['begin_date'])) <NEW_LINE> end_date = datetime.fromtimestamp(int(params['end_date'])) <NEW_LINE> aggregation = params.get('aggregation', 'day') <NEW_LINE> cached, digest_data = get_digest_data(site, begin_date, end_date, aggregation, cache_timeout=settings.DASHBOARD_DIGEST_CACHE_TIMEOUT) <NEW_LINE> return JsonResponse({'cached': cached, 'data': digest_data}) | Digest Api.
Examples:
http://api.addnow.dev/api/v1/digest?site=1&begin_date=<timestamp>&end_date=<timestamp>
http://api.addnow.dev/api/v1/digest?site=1&begin_date=<timestamp>&end_date=<timestamp>&aggregation=day | 625990537b25080760ed8758 |
class OHEMSampler(gluon.Block): <NEW_LINE> <INDENT> def __init__(self, ratio, min_samples=0, thresh=0.5): <NEW_LINE> <INDENT> super(OHEMSampler, self).__init__() <NEW_LINE> assert ratio > 0, "OHEMSampler ratio must > 0, {} given".format(ratio) <NEW_LINE> self._ratio = ratio <NEW_LINE> self._min_samples = min_samples <NEW_LINE> self._thresh = thresh <NEW_LINE> <DEDENT> def forward(self, x, logits, ious): <NEW_LINE> <INDENT> F = nd <NEW_LINE> num_positive = F.sum(x > -1, axis=1) <NEW_LINE> num_negative = self._ratio * num_positive <NEW_LINE> num_total = x.shape[1] <NEW_LINE> num_negative = F.minimum(F.maximum(self._min_samples, num_negative), num_total - num_positive) <NEW_LINE> positive = logits.slice_axis(axis=2, begin=1, end=-1) <NEW_LINE> background = logits.slice_axis(axis=2, begin=0, end=1).reshape((0, -1)) <NEW_LINE> maxval = positive.max(axis=2) <NEW_LINE> esum = F.exp(logits - maxval.reshape((0, 0, 1))).sum(axis=2) <NEW_LINE> score = -F.log(F.exp(background - maxval) / esum) <NEW_LINE> mask = F.ones_like(score) * -1 <NEW_LINE> score = F.where(x < 0, score, mask) <NEW_LINE> if len(ious.shape) == 3: <NEW_LINE> <INDENT> ious = F.max(ious, axis=2) <NEW_LINE> <DEDENT> score = F.where(ious < self._thresh, score, mask) <NEW_LINE> argmaxs = F.argsort(score, axis=1, is_ascend=False) <NEW_LINE> y = np.zeros(x.shape) <NEW_LINE> y[np.where(x.asnumpy() >= 0)] = 1 <NEW_LINE> argmaxs = argmaxs.asnumpy() <NEW_LINE> for i, num_neg in zip(range(x.shape[0]), num_negative.asnumpy().astype(np.int32)): <NEW_LINE> <INDENT> indices = argmaxs[i, :num_neg] <NEW_LINE> y[i, indices.astype(np.int32)] = -1 <NEW_LINE> <DEDENT> return F.array(y, ctx=x.context) | A sampler implementing Online Hard-negative mining.
As described in paper https://arxiv.org/abs/1604.03540.
Parameters
----------
ratio : float
Ratio of negative vs. positive samples. Values >= 1.0 is recommended.
min_samples : int, default 0
Minimum samples to be selected regardless of positive samples.
For example, if positive samples is 0, we sometimes still want some num_negative
samples to be selected.
thresh : float, default 0.5
IOU overlap threshold of selected negative samples. IOU must not exceed
this threshold such that good matching anchors won't be selected as
negative samples. | 625990538e7ae83300eea589 |
class Solution: <NEW_LINE> <INDENT> def backPackVIII(self, n, value, amount): <NEW_LINE> <INDENT> m = len(value) <NEW_LINE> dp = [[False] * (n+1) for i in range(m+1)] <NEW_LINE> for i in range(m+1): <NEW_LINE> <INDENT> dp[i][0] = True <NEW_LINE> <DEDENT> for i in range(1, m+1): <NEW_LINE> <INDENT> for j in range(1, n+1): <NEW_LINE> <INDENT> for k in range(amount[i-1]+1): <NEW_LINE> <INDENT> if j >= k*value[i-1]: <NEW_LINE> <INDENT> dp[i][j] |= dp[i-1][j-k*value[i-1]] <NEW_LINE> if dp[i][j]: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> ret = 0 <NEW_LINE> for j in range(1,n+1): <NEW_LINE> <INDENT> if dp[m][j]: <NEW_LINE> <INDENT> ret += 1 <NEW_LINE> <DEDENT> <DEDENT> return ret | @param n: the value from 1 - n
@param value: the value of coins
@param amount: the number of coins
@return: how many different value | 62599053be8e80087fbc0572 |
class Cybersource(PaymentMethod): <NEW_LINE> <INDENT> name = settings.SOURCE_TYPE <NEW_LINE> code = "cybersource" <NEW_LINE> serializer_class = PaymentMethodSerializer <NEW_LINE> def _record_payment(self, request, order, method_key, amount, reference, **kwargs): <NEW_LINE> <INDENT> extra_fields = {} <NEW_LINE> signals.pre_build_get_token_request.send( sender=self.__class__, extra_fields=extra_fields, request=request, order=order, method_key=method_key, ) <NEW_LINE> session_id = request.COOKIES.get(django_settings.SESSION_COOKIE_NAME) <NEW_LINE> operation = actions.CreatePaymentToken( session_id=session_id, order=order, method_key=method_key, amount=amount, server_hostname=request.META.get("HTTP_HOST", ""), customer_ip_address=request.META["REMOTE_ADDR"], fingerprint_session_id=request.session.get(CHECKOUT_FINGERPRINT_SESSION_ID), extra_fields=extra_fields, ) <NEW_LINE> url, fields = self._fields(operation) <NEW_LINE> return FormPostRequired(amount=amount, name="get-token", url=url, fields=fields) <NEW_LINE> <DEDENT> def record_created_payment_token(self, reply_log_entry, data): <NEW_LINE> <INDENT> token_string = data.get("payment_token") <NEW_LINE> card_num = data.get("req_card_number") <NEW_LINE> card_type = data.get("req_card_type") <NEW_LINE> try: <NEW_LINE> <INDENT> token = PaymentToken.objects.filter(token=token_string).get() <NEW_LINE> <DEDENT> except PaymentToken.DoesNotExist: <NEW_LINE> <INDENT> token = PaymentToken( log=reply_log_entry, token=token_string, masked_card_number=card_num, card_type=card_type, ) <NEW_LINE> token.save() <NEW_LINE> <DEDENT> return token, None <NEW_LINE> <DEDENT> def _fields(self, operation): <NEW_LINE> <INDENT> fields = [] <NEW_LINE> cs_fields = operation.fields() <NEW_LINE> editable_fields = cs_fields["unsigned_field_names"].split(",") <NEW_LINE> for key, value in cs_fields.items(): <NEW_LINE> <INDENT> fields.append( { "key": key, "value": value if isinstance(value, str) else value.decode(), "editable": (key in editable_fields), } ) <NEW_LINE> <DEDENT> return operation.url, fields | This is an example of how to implement a payment method that required some off-site
interaction, like Cybersource Secure Acceptance, for example. It returns a pending
status initially that requires the client app to make a form post, which in-turn
redirects back to us. This is a common pattern in PCI SAQ A-EP ecommerce sites. | 62599053596a897236129028 |
class TestNotEquals(unittest.TestCase): <NEW_LINE> <INDENT> def test_a(self): <NEW_LINE> <INDENT> v1 = versions.Version(version='1.2.3', name='foo') <NEW_LINE> v2 = versions.Version(version='2.2.3', name='bar') <NEW_LINE> self.assertTrue(v1 != v2) <NEW_LINE> self.assertTrue(v2 != v1) <NEW_LINE> <DEDENT> def test_b(self): <NEW_LINE> <INDENT> v1 = versions.Version(version='1.2.3', name='foo') <NEW_LINE> v2 = versions.Version(version='1.2', name='bar') <NEW_LINE> self.assertTrue(v1 != v2) <NEW_LINE> self.assertTrue(v2 != v1) <NEW_LINE> <DEDENT> def test_c(self): <NEW_LINE> <INDENT> v1 = versions.Version(version='1.2.3', name='foo') <NEW_LINE> v2 = versions.Version(version='1.2.3', name='bar') <NEW_LINE> self.assertFalse(v1 != v2) <NEW_LINE> self.assertFalse(v2 != v1) <NEW_LINE> <DEDENT> def test_d(self): <NEW_LINE> <INDENT> v1 = versions.Version(version='1.2.3', name='foo') <NEW_LINE> self.assertTrue(v1 != '3.4') | A suite of test that compare not equivalence, ``!=`` | 625990534a966d76dd5f03e1 |
class sbb_gz(models.Model): <NEW_LINE> <INDENT> sbb_gz_type_choice = ( (0, '测酒仪'), (1, '模拟机'), (2, '验卡器'), (3, '读卡器'), (4, '自助出退勤'), (5, '其他') ) <NEW_LINE> sbb_type = models.SmallIntegerField(choices=sbb_gz_type_choice, default=0, verbose_name="设备类型") <NEW_LINE> bianhao = models.CharField(max_length=64, verbose_name="设备编号") <NEW_LINE> business_unit = models.ForeignKey('Shudi', on_delete=models.CASCADE, verbose_name='属地') <NEW_LINE> guzhang_time = models.DateTimeField(null=True, blank=True, verbose_name="故障时间") <NEW_LINE> chuli_time = models.DateTimeField(null=True, blank=True, verbose_name="处理时间") <NEW_LINE> zhibanyuan = models.CharField(max_length=16, verbose_name="值班员") <NEW_LINE> chuliren = models.CharField(max_length=16, verbose_name="处理人") <NEW_LINE> yanshouren = models.CharField(max_length=16, verbose_name="验收人") <NEW_LINE> memo = models.TextField(null=True, blank=True, verbose_name='备注') <NEW_LINE> m_time = models.DateTimeField(auto_now=True, verbose_name='更新日期') <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = '设备故障记录表' <NEW_LINE> verbose_name_plural = "设备故障记录表" <NEW_LINE> ordering = ['-m_time'] | Device classification | 62599053e64d504609df9e49 |