code
stringlengths 4
4.48k
| docstring
stringlengths 1
6.45k
| _id
stringlengths 24
24
|
---|---|---|
class SignalBlocker(object):
    """Context manager to block signals of the given QObject.

    Inside the ``with`` block the widget's signals are suppressed;
    the previous blocking state is restored on exit, so nesting and
    already-blocked widgets behave correctly.
    """

    def __init__(self, widget):
        self.widget = widget
        # Previous blockSignals state; captured on __enter__.
        self.state = False

    def __enter__(self):
        previous = self.widget.blockSignals(True)
        self.state = previous

    def __exit__(self, exc_type, exc_value, traceback):
        self.widget.blockSignals(self.state)
class CollectionPortlet(base.Renderer, FolderListing):
    """Extend portlet base renderer.

    Renders a collection portlet through an alternative page template;
    ``render`` is bound directly to the template so the portlet
    machinery invokes it with no extra Python logic.
    """

    # NOTE(review): relies on Plone's base.Renderer / FolderListing
    # contract -- confirm against the portlet registration.
    _template = ViewPageTemplateFile("alternative_templates/portletcollection.pt")
    render = _template
class ListDetailResource:
    """Manipulate a task list (Falcon resource).

    Exposes GET (serialize one list) and DELETE (remove one list)
    for the ``List`` model addressed by its integer primary key.
    """

    def get_object(self, id: int) -> List:
        # Raises a 404 response when no List with this id exists.
        return get_or_404(self.session, List, id=id)

    def on_get(self, request, response, id: int):
        """Return the serialized task list as the JSON response body."""
        list_ = self.get_object(id)
        response.json = list_.serialized

    def on_delete(self, request, response, id):
        """Delete the task list and answer 204 No Content."""
        list_ = self.get_object(id)
        self.session.delete(list_)
        self.session.commit()
        response.status = falcon.HTTP_204
class ArrayPointsIndexer:
    """Point-wise label indexing helper.

    ``arr.points[k1, k2, ...]`` selects individual N-dimensional
    points whose coordinates are zipped from the per-axis label keys,
    instead of taking the cartesian product as plain ``arr[...]``
    would.  All per-axis keys must therefore have the same length,
    otherwise the wrapped array raises
    ``ValueError: all combined keys should have the same length``.
    """

    # One attribute and no __dict__: this proxy is created per access.
    __slots__ = ('array',)

    def __init__(self, array):
        self.array = array

    def __getitem__(self, key):
        # Delegate, forcing point-wise semantics on the wrapped array.
        return self.array.__getitem__(key, points=True)

    def __setitem__(self, key, value):
        # Same delegation for assignment.
        self.array.__setitem__(key, value, points=True)
class ZillowError(Exception):
    """Zillow API error.

    Error messages copied from Zillow's API documentation:
    http://www.zillow.com/howto/api/GetDeepSearchResults.htm
    """

    # Status-code -> explanation table, verbatim from the API docs.
    code = dict([
        (0, 'Request successfully processed'),
        (1, 'Service error-there was a server-side error ' +
            'while processing the request. \n ' +
            'Check to see if your url is properly formed: delimiters, ' +
            'character cases, etc.'),
        (2, 'The specified ZWSID parameter was invalid or ' +
            'not specified in the request. \n' +
            'Check if you have provided a ZWSID in your API call. ' +
            'If yes, check if the ZWSID is keyed in correctly. ' +
            'If it still doesn\'t work, contact Zillow to ' +
            'get help on fixing your ZWSID.'),
        (3, 'Web services are currently unavailable.\n' +
            'The Zillow Web Service is currently not available. ' +
            'Please come back later and try again.'),
        (4, 'The API call is currently unavailable.\n' +
            'The Zillow Web Service is currently not available. ' +
            'Please come back later and try again.'),
        (500, 'Invalid or missing address parameter.\n'
              'Check if the input address parameter ' +
              'matches the format specified ' +
              'in the input parameters table. When inputting a city name, the ' +
              'state should also be given. A city name alone will not give ' +
              'a valid address.'),
        (501, 'Invalid or missing citystatezip parameter. ' +
              'Check if the input address parameter matches ' +
              'the format specified ' +
              'in the input parameters table. When inputting a city name, the ' +
              'state should also be given. A city name alone will not give' +
              'a valid address.'),
        (502, 'No results found. \n ' +
              'Sorry, the address you provided is not found in the Zillow ' +
              'property database.'),
        (503, 'Failed to resolve city, state or ZIP code.\n ' +
              'Check if the city-state combination is valid. ' +
              'Also check if you have provided a valid ZIP code.'),
        (504, 'No coverage for specified area. \nThe specified area ' +
              'is not covered by the Zillow property database.'),
        (505, 'Timeout Your request timed out. \nThe server could be ' +
              'busy or unavailable. Try again later.'),
        (506, 'Address string too long. \nIf address is valid, try using ' +
              'abbreviations.'),
        (507, 'No exact match found. \n' +
              'Verify that the given address is correct.'),
        (508, 'No exact match found for input address.'),
    ])

    def __init__(self, status, url=None, response=None):
        """
        :param status: numeric Zillow status code (key into ``code``)
        :param url: the request URL that produced the error, if known
        :param response: the raw API response, if kept
        """
        Exception.__init__(self, status)
        self.status = status
        # Raises KeyError for a status code not present in the table.
        self.message = {
            'code': status,
            'text': self.code[int(status)]}
        self.url = url
        self.response = response

    def __str__(self):
        return repr(self.message)

    def __unicode__(self):
        # Python 2 only: ``unicode`` is undefined under Python 3.
        return unicode(self.__str__())
class TestConstFolding(unittest.TestCase):
    """Test constant folding transformer."""

    def generic_ConstFolding(self, origstring, refstring, nbits, lvl=False):
        """Fold constants in *origstring* and compare with *refstring*.

        When *lvl* is true, both ASTs are flattened first so n-ary
        nodes compare equal.
        """
        orig = ast.parse(origstring)
        ref = ast.parse(refstring)
        if lvl:
            orig = Flattening().visit(orig)
            # BUG FIX: the reference AST must be flattened, not the
            # original a second time -- the old code compared the
            # folded tree against a transform of itself, making the
            # flattened tests vacuous.
            ref = Flattening().visit(ref)
        orig = asttools.ConstFolding(orig, nbits).visit(orig)
        self.assertTrue(asttools.Comparator().visit(orig, ref))

    def test_basics(self):
        """Folding on plain (non-flattened) ASTs; results are mod 2**nbits."""
        corresp = {"45 + 2": ["47", 8],
                   "(3 + 2 + x)": ["(5 + x)", 16],
                   "2*230": ["204", 8],
                   "2 - 4": ["254", 8],
                   "- (3*45)": ["4294967161", 32],
                   "(3 + x)*2 + 4": ["(3 + x)*2 + 4", 64]}
        for origstring, [refstring, nbits] in corresp.iteritems():
            self.generic_ConstFolding(origstring, refstring, nbits)

    def test_flattened_ast(self):
        """Folding after flattening associative chains of +, ^, *."""
        corresp = {"(x + 3) + 2": ["x + 5", 8],
                   "((x ^ 14) ^ 234) ^ 48": ["x ^ 212", 8],
                   "42*34*y*z": ["1428*y*z", 16]}
        for origstring, [refstring, nbits] in corresp.iteritems():
            self.generic_ConstFolding(origstring, refstring, nbits, True)
class Region(models.Model):
    """A geographic region, identified by a unique name."""

    name = models.CharField(max_length=80, unique=True)
class CoffeeMaker:
    """Models the machine that makes the coffee.

    Tracks the machine's stock of water (ml), milk (ml) and coffee (g),
    can report it, check whether a drink is makeable, and brew a drink.
    """

    def __init__(self):
        # Starting stock levels.
        self.resources = {
            "water": 300,
            "milk": 200,
            "coffee": 100,
        }

    def report(self):
        """Print the current amount of each resource."""
        for label, key, unit in (("Water", "water", "ml"),
                                 ("Milk", "milk", "ml"),
                                 ("Coffee", "coffee", "g")):
            print(f"{label}: {self.resources[key]}{unit}")

    def is_resource_sufficient(self, drink):
        """Return True when every ingredient of *drink* is in stock.

        Prints an apology for each ingredient that is short.
        """
        sufficient = True
        for ingredient, required in drink.ingredients.items():
            if required > self.resources[ingredient]:
                print(f"Sorry, there is not enough {ingredient} in the coffee maker at the moment.\n")
                sufficient = False
        return sufficient

    def make_coffee(self, order):
        """Deduct *order*'s ingredients from stock and announce the drink."""
        for ingredient, amount in order.ingredients.items():
            self.resources[ingredient] -= amount
        print(f"Coffee is ready, here is your {order.name} ☕️. Enjoy!\n")
class HexField(pygame.sprite.Group):
    """Hexagonal game board (sprite group of HexTile cells).

    (Original docstring: "Игровое поле" -- Russian for "game field".)

    Cells form an offset hex grid: 27 px horizontal pitch, 24 px
    vertical pitch, even rows shifted right by 14 px (half a hex).
    """

    def __init__(self, x, y, width, height):
        super().__init__()
        self.width = width    # number of columns
        self.height = height  # number of rows
        self.field = []       # field[row][col] -> HexTile
        self.pos = x, y       # top-left pixel origin of the board
        self.move_is_end = False
        self.place_ship_result = cst.SUCCESS
        self.create_field(x, y)

    def create_field(self, field_x, field_y):
        """Populate self.field with one HexTile per grid cell."""
        for y in range(self.height):
            self.field.append([])
            for x in range(self.width):
                # Even rows are offset half a hex (14 px) to the right.
                hex_x = x * 27 + field_x + (14 if y % 2 == 0 else 0)
                hex_y = y * 24 + field_y
                self.field[-1].append(HexTile(self, (hex_x, hex_y), (x, y)))

    def get_move_is_end(self):
        return self.move_is_end

    def set_move_is_end(self, value):
        self.move_is_end = value

    def get_place_ship_result(self):
        return self.place_ship_result

    def set_place_ship_result(self, value):
        self.place_ship_result = value

    @staticmethod
    def get_neighbor(pos, direction):
        """Grid coordinates of the neighbour of *pos* in *direction*.

        Offset-grid arithmetic: diagonal moves shift the column
        depending on row parity.  Implicitly returns None for an
        unknown direction constant.
        """
        if direction == cst.RIGHT:
            return pos[0] + 1, pos[1]
        elif direction == cst.LEFT:
            return pos[0] - 1, pos[1]
        elif direction == cst.RIGHT_TOP:
            return pos[0] + (1 if pos[1] % 2 == 0 else 0), pos[1] - 1
        elif direction == cst.LEFT_TOP:
            return pos[0] - pos[1] % 2, pos[1] - 1
        elif direction == cst.LEFT_DOWN:
            return pos[0] - pos[1] % 2, pos[1] + 1
        elif direction == cst.RIGHT_DOWN:
            return pos[0] + (1 if pos[1] % 2 == 0 else 0), pos[1] + 1

    def get_neighbors(self, pos):
        """Return all in-bounds neighbours of *pos*.

        NOTE(review): assumes the six cst direction constants are
        exactly the integers 0..5 -- confirm in the constants module.
        """
        neighbors = []
        for i in range(6):
            neighbor = self.get_neighbor(pos, i)
            if self.cell_in_field(neighbor):
                neighbors.append(neighbor)
        return neighbors

    def cell_in_field(self, pos):
        # True when pos lies inside the width x height grid.
        return 0 <= pos[0] < self.width and 0 <= pos[1] < self.height

    def get_cell(self, pos):
        """Return the HexTile at *pos*, or None when out of bounds."""
        if not self.cell_in_field(pos):
            return None
        return self.field[pos[1]][pos[0]]

    def set_pos(self, pos):
        """Move the whole board (and every tile sprite) to a new origin."""
        for sprite in self.sprites():
            x, y = sprite.get_coords()
            x += pos[0] - self.pos[0]
            y += pos[1] - self.pos[1]
            sprite.set_coords((x, y))
        self.pos = pos
class TestIrSequenceQoyRangeEnd(SingleTransactionCase):
    """A few tests for a 'Standard' sequence with range end.

    Exercises the %(range_end_qoy)s legend (quarter-of-year of the
    date range's *end* date) used in prefix and suffix.
    """

    def test_ir_sequence_qoy_range_end_1_create(self):
        """Create the sequence plus two date ranges straddling new year."""
        seq = self.env["ir.sequence"].create(
            {
                "code": "test_qoy_range_end",
                "name": "Test sequence range end BE",
                "use_date_range": True,
                "prefix": "test-%(range_end_qoy)s-",
                "suffix": "-%(range_end_qoy)s",
                "padding": 4,
            }
        )
        self.assertTrue(seq)
        year = fields.Date.today().year
        # Range 1 ends Jan 31 -> quarter 1 of the current year.
        date_range_1 = self.env["ir.sequence.date_range"].create(
            {
                "date_from": datetime.date(year=year - 1, month=11, day=1),
                "date_to": datetime.date(year=year, month=1, day=31),
                "sequence_id": seq.id,
                "number_next_actual": 314,
            }
        )
        self.assertTrue(date_range_1)
        # Range 2 ends Apr 30 -> quarter 2.
        date_range_2 = self.env["ir.sequence.date_range"].create(
            {
                "date_from": datetime.date(year=year, month=2, day=1),
                "date_to": datetime.date(year=year, month=4, day=30),
                "sequence_id": seq.id,
                "number_next_actual": 42,
            }
        )
        self.assertTrue(date_range_2)
        domain = [("sequence_id", "=", seq.id)]
        date_ranges = self.env["ir.sequence.date_range"].search(domain)
        self.assertEqual(len(date_ranges), 2)

    def test_ir_sequence_qoy_range_end_2_check_year(self):
        """Numbers drawn inside each range carry that range's end quarter."""
        year = fields.Date.today().year
        # November of last year falls in range 1 (quarter 1 marker).
        datetime1 = datetime.datetime(
            year=year - 1,
            month=11,
            day=14,
            hour=10,
            minute=14,
            second=16,
            microsecond=0,
        )
        with freeze_time(datetime1):
            value = self.env["ir.sequence"].next_by_code("test_qoy_range_end")
            self.assertEqual(value, "test-1-0314-1")
            value = self.env["ir.sequence"].next_by_code("test_qoy_range_end")
            self.assertEqual(value, "test-1-0315-1")
        # March of this year falls in range 2 (quarter 2 marker).
        datetime2 = datetime.datetime(
            year=year, month=3, day=14, hour=15, minute=9, second=26, microsecond=535898
        )
        with freeze_time(datetime2):
            value = self.env["ir.sequence"].next_by_code("test_qoy_range_end")
            self.assertEqual(value, "test-2-0042-2")
            value = self.env["ir.sequence"].next_by_code("test_qoy_range_end")
            self.assertEqual(value, "test-2-0043-2")

    def test_ir_sequence_qoy_range_end_3_unlink(self):
        """Clean-up: the sequence can be deleted."""
        seq = self.env["ir.sequence"].search([("code", "=", "test_qoy_range_end")])
        seq.unlink()
class line(SVGelement):
    """l=line(x1,y1,x2,y2,stroke,stroke_width,**args)

    A line is defined by a begin x,y pair and an end x,y pair.
    Any argument left as None is simply omitted from the element's
    attributes.
    """

    def __init__(self, x1=None, y1=None, x2=None, y2=None,
                 stroke=None, stroke_width=None, **args):
        SVGelement.__init__(self, 'line', **args)
        # Idiom fix: identity comparison with None (PEP 8) instead of
        # "!= None"; robust against operands overriding __eq__, and a
        # coordinate of 0 is still emitted.
        if x1 is not None:
            self.attributes['x1'] = _num_str(x1)
        if y1 is not None:
            self.attributes['y1'] = _num_str(y1)
        if x2 is not None:
            self.attributes['x2'] = _num_str(x2)
        if y2 is not None:
            self.attributes['y2'] = _num_str(y2)
        if stroke_width is not None:
            self.attributes['stroke-width'] = stroke_width
        if stroke is not None:
            self.attributes['stroke'] = stroke
class PostgreSQLConnection(object):
    """A PostgreSQL database connection pool."""

    log = txaio.make_logger()

    def __init__(self, id, config):
        """
        :param id: Connection identifier, echoed back by ``marshal``.
        :param config: dict with host/port/database/user/password keys
            (host and port are optional and default to localhost:5432).
        """
        self.id = id
        self.config = config
        self.started = None  # UTC timestamp set by start()
        self.stopped = None  # UTC timestamp set by stop()
        params = {
            'host': config.get('host', 'localhost'),
            'port': config.get('port', 5432),
            'database': config['database'],
            'user': config['user'],
            # Password may be indirected (e.g. env var) via the helper.
            'password': get_config_value(config, 'password'),
        }
        self.pool = txpostgres.ConnectionPool(None, min=5, **params)

    def start(self):
        """Open the pool; returns a Deferred firing once connected."""
        self.started = datetime.utcnow()
        return self.pool.start()

    def stop(self):
        """Close the pool; returns a Deferred firing once closed."""
        self.stopped = datetime.utcnow()
        return self.pool.close()

    def marshal(self):
        """Return a JSON-serializable summary of this connection."""
        return {
            u'id': self.id,
            u'started': utcstr(self.started),
            u'stopped': utcstr(self.stopped) if self.stopped else None,
            u'config': self.config,
        }
class SafeTarget(Target):
    """A safe target should not be hit and will penalize the
    player if it is hit. 10 points will be deducted.
    """

    def __init__(self):
        super().__init__()

    def draw(self):
        # Safe targets render as a small red square (13 px side).
        self.radius = 13
        arcade.draw_rectangle_filled(self.center.x, self.center.y,
                                     self.radius, self.radius,
                                     arcade.color.RED)

    def hit(self):
        """Kill the target and return the score penalty (-10)."""
        super().hit()
        self.alive = False
        return -10
class RegviewLoadCommand(gdb.Command):
    """Load register definitions from XML file.

    Registers the GDB CLI command ``regview load <file>`` with
    filename tab-completion.
    """

    def __init__(self):
        super (RegviewLoadCommand, self).__init__ ("regview load",
                                                   gdb.COMMAND_SUPPORT,
                                                   gdb.COMPLETE_FILENAME)

    def invoke(self, arg, from_tty):
        # *arg* is the filename typed after "regview load".
        rv.load_definitions(arg)
class Jsonable(object):
    """Jsonable mixin class to provide json functionality and a number of simple helpers.

    NOTE(review): Python 2 only -- relies on ``unicode`` and
    ``dict.iteritems``.
    """

    @staticmethod
    def _objHook(obj):
        # Default value conversion applied to each serialized attribute.
        if isinstance(obj, datetime.datetime):
            return obj.strftime('%Y-%m-%dT%H:%M:%S')
        if isinstance(obj, Jsonable):
            return obj.json()
        if isinstance(obj, list):
            return [Jsonable._objHook(item) for item in obj]
        else:
            return obj

    @staticmethod
    def _typeHook(typeObj):
        # Whitelist of attribute types that get serialized at all.
        return typeObj in [bool, str, unicode, int, float, Jsonable,
                           datetime.datetime, list]

    def json(self, objHook=lambda x: Jsonable._objHook(x),
             typeHook=lambda x: Jsonable._typeHook(x)):
        """Return a dict of this instance's serializable attributes.

        The list of qualifying attribute names is computed once and
        cached on the instance as ``__jsonattrs__``.
        """
        if not hasattr(self, '__jsonattrs__'):
            attrList = []
            for k, v in self.__dict__.iteritems():
                if typeHook(type(v)):
                    attrList.append(k)
            self.__jsonattrs__ = attrList
        return {attr: objHook(getattr(self, attr)) for attr in self.__jsonattrs__}

    def __str__(self):
        return str(self.json())
class HSplit(Block):
    """Splits an array along the first axis into two halves.

    ``half1`` receives rows ``[0, h)`` and ``half2`` rows ``[h, end)``
    where ``h = len // 2``; for odd lengths half2 is one row longer.
    """

    Block.alias('hsplit')
    Block.input('value')
    Block.output('half1')
    Block.output('half2')

    def update(self):
        value = self.input.value
        # BUG FIX: floor division keeps the index an int under
        # Python 3 (plain "/" yields a float and breaks slicing);
        # identical result for ints under Python 2.
        h = value.shape[0] // 2
        self.output.half1 = value[0:h, ...]
        self.output.half2 = value[h:, ...]
class CurrentRfidSerializer(serializers.HyperlinkedModelSerializer):
    """Serializer for sending the current rfid value to an AngularJS
    client, for prefilling a web form based upon the last scanned item
    that wasn't found in the database.
    """

    class Meta:
        model = CurrentRfid
        fields = ('id', 'url', 'rfid')
class TestXmlNs0ChangeViewRequest(unittest.TestCase):
    """XmlNs0ChangeViewRequest unit test stubs."""

    def setUp(self):
        """No fixtures required yet."""

    def tearDown(self):
        """Nothing to clean up."""

    def testXmlNs0ChangeViewRequest(self):
        """Placeholder: construct and validate XmlNs0ChangeViewRequest."""
class Expression3(Expression):
    """Journey->Number of nodes in the journey

    Parameters:
    0: (not found) ((unknown 27040))
    Return type: Int
    """

    def get(self, instance):
        # Count of nodes currently recorded in the active journey list.
        return len(instance.objectPlayer.journeys)
class Position(ModelBase):
    """Represents a single Field Hockey position."""

    name = models.CharField(max_length=255)
    short_name = models.CharField(max_length=5, help_text="5 characters or fewer.")

    def __unicode__(self):
        return self.name

    def save(self, *args, **kwargs):
        """Populate the slug from the name when it is missing."""
        # Idiom fix: a single truthiness test replaces
        # "slug == None or slug == ''" -- None and '' are the only
        # falsy values a CharField takes.
        if not self.slug:
            self.slug = slugify(self.__unicode__())
        super(Position, self).save(*args, **kwargs)
class CommentList(generics.ListCreateAPIView):
    """get: List all comments on the given recommendation (post).

    post: Create a new comment on the given recommendation (post).
    """

    serializer_class = CommentSerializer

    def get_post(self):
        # 404s when the post referenced in the URL kwargs does not exist.
        post = get_object_or_404(Post.objects, pk=self.kwargs['post'])
        return post

    def get_queryset(self):
        # Only comments belonging to the addressed post.
        comment = Comment.objects.filter(post=self.get_post())
        return comment

    def perform_create(self, serializer):
        # Author is always the requesting user; the post comes from the URL.
        serializer.save(author=self.request.user, post_id=self.get_post().pk)
class Task(object):
    """Documentation: https://docs.droppyapp.com/tasks/video-transcode

    Transcodes every file found directly in *input_dir* (plus the
    contents of its immediate sub-directories) into *output_dir*
    using ffmpeg.
    """

    def __init__(self, input_dir, output_dir, **kwargs):
        # Optional keyword configuration with sensible defaults.
        ffmpeg_exe = kwargs.get(str('ffmpeg_executable'), '/usr/local/bin/ffmpeg')
        extension = kwargs.get(str('extension'), 'm4v')
        other_args = kwargs.get(str('other_args'), '')
        if not os.path.isfile(ffmpeg_exe):
            sys.exit('ffmpeg not found at "%s"' % ffmpeg_exe)
        for item_name in os.listdir(input_dir):
            item_path = os.path.join(input_dir, item_name)
            if os.path.isfile(item_path):
                self.transcode_file(item_path, output_dir, ffmpeg_exe, extension, other_args)
            elif os.path.isdir(item_path):
                # Mirror one level of sub-directories in the output.
                output_sub_dir = os.path.join(output_dir, item_name)
                os.makedirs(output_sub_dir)
                contained_files = get_file_paths_from_directory(item_path)
                for contained_file in contained_files:
                    self.transcode_file(contained_file, output_sub_dir, ffmpeg_exe, extension, other_args)

    @staticmethod
    def transcode_file(input_file, output_dir, ffmpeg_exe, extension, other_args):
        """Run ffmpeg on one file; exits the process on a non-zero code."""
        output_file_name, _ = os.path.splitext(os.path.basename(input_file))
        output_file = os.path.join(output_dir, output_file_name + '.' + extension)
        command = ffmpeg_exe
        command += ' -i "%s"' % sanitize_file_path_for_shell(input_file)
        if len(other_args) > 0:
            command += ' %s' % other_args
        command += ' "%s"' % sanitize_file_path_for_shell(output_file)
        print('Calling: %s' % command)
        # NOTE(review): shell=True with interpolated paths relies
        # entirely on sanitize_file_path_for_shell escaping correctly
        # -- verify that helper against quote/metacharacter input.
        exit_code = subprocess.call(command, shell=True)
        if exit_code > 0:
            sys.exit(exit_code)
class TChannel(InteractionCrossSection):
    """Velocity-dependent cross section of the form

    sigma(v) = norm / (1 + (v/v_ref)^2)^2
    """

    def __init__(self, norm, v_ref):
        self._vref = v_ref
        super(TChannel, self).__init__(norm, self._velocity_dependence_kernel)

    @property
    def kwargs(self):
        """Constructor keyword arguments, e.g. for re-instantiation."""
        return {'norm': self.norm, 'v_ref': self._vref}

    def _velocity_dependence_kernel(self, v):
        # Suppression factor (1 + (v/v_ref)^2)^-2: flat below v_ref,
        # falling off as v^-4 well above it.
        reduced = v / self._vref
        return 1 / (1 + reduced ** 2) ** 2
class CurrencyRateUpdateService(models.Model):
    """Per-company configuration telling which webservice updates
    which currencies.
    """

    _name = "currency.rate.update.service"
    _description = "Currency Rate Update"

    # Supported rate providers.
    service = fields.Selection(
        [
            ('Admin_ch_getter', 'Admin.ch'),
            ('ECB_getter', 'European Central Bank'),
            ('Yahoo_getter', 'Yahoo Finance '),
            ('PL_NBP_getter', 'Narodowy Bank Polski'),
            ('Banxico_getter', 'Banco de México'),
            ('CA_BOC_getter', 'Bank of Canada - noon rates'),
        ],
        "Webservice to use",
        required=True
    )
    # Currencies refreshed by this service.
    currency_to_update = fields.Many2many(
        'res.currency',
        'res_curreny_auto_udate_rel',
        'service_id',
        'currency_id',
        'currency to update with this service',
    )
    company_id = fields.Many2one(
        'res.company',
        'linked company',
    )
    note = fields.Text('update notice')
    max_delta_days = fields.Integer('Max delta days', required=True, help="If the time delta between the rate date given by the webservice and the current date exeeds this value, then the currency rate is not updated in OpenERP.")

    # Old-API defaults/constraints, kept as in the original module.
    _defaults = {
        'max_delta_days': lambda *a: 4,
    }
    _sql_constraints = [
        (
            'curr_service_unique',
            'unique (service, company_id)',
            _('You can use a service one time per company !')
        )
    ]

    def _check_max_delta_days(self, cr, uid, ids):
        # Valid when every record has a non-negative max_delta_days.
        for company in self.read(cr, uid, ids, ['max_delta_days']):
            if company['max_delta_days'] >= 0:
                continue
            else:
                return False
        return True

    _constraints = [
        (_check_max_delta_days, "'Max delta days' must be >= 0", ['max_delta_days']),
    ]
class Scan():
    """Analyse the tree of directories under *base_dir*.

    Call ``refresh()`` to re-analyse the tree.  _File/_Directory
    wrappers for every path are kept in an internal path-keyed dict.
    """

    def __init__(self, base_dir=os.getcwd()):
        # NOTE(review): the default is evaluated once at import time,
        # so "current working directory" is whatever it was then.
        if base_dir == os.getcwd():
            print('Scanned current working directory')
        self.base_dir = self._format_directory(base_dir)
        self._old_asset_dict = {}  # previous scan, for change detection
        self._asset_dict = {}      # path -> _File/_Directory wrapper
        self.refresh()

    def refresh(self, block_comparison=False):
        """Re-walk the tree; unless *block_comparison*, print how many
        paths are new since the previous scan."""
        _ = self._asset_dict.copy()
        self._asset_dict.clear()
        changes = 0
        dirs = []
        files = []
        for target in glob.glob(self.base_dir + '**', recursive=True):
            if os.path.islink(target):
                raise Exception('Not built to handle links in tree:\n%s' % target)
            if os.path.isdir(target):
                target = self._format_directory(target)
                if target not in _:
                    changes += 1
                dirs.append(_Directory(target, self))
            elif os.path.isfile(target):
                if target not in _:
                    changes += 1
                files.append(_File(target, self))
            else:
                raise Exception('Unrecognised target type:\n%s' % target)
        # Insert directories first so parents precede their files.
        for directory in dirs:
            self._asset_dict[directory.path] = directory
        for file in files:
            self._asset_dict[file.path] = file
        if not block_comparison:
            if changes != 0:
                self._old_asset_dict = self._asset_dict
                print('%d changes since last refresh.' % changes)
            else:
                self._old_asset_dict = _
                print('No changes were encountered in the last refresh.')

    def get_files_of_type(self, extensions:list=[], img_search=False, return_objs=False):
        """Return paths (or wrapper objects) of scanned files whose
        extension matches; *img_search* shortcuts to jpg/jpeg/png."""
        if img_search:
            extensions = ['.jpg', '.jpeg', '.png']
        elif not extensions:
            return
        # Normalise extensions: lower-case, no leading dot.
        _ = []
        for ext in extensions:
            if ext.startswith('.'):
                ext = ext[1:]
            _.append(ext.lower())
        extensions = _
        ret_paths = []
        ret_objs = []
        for path, obj in self._asset_dict.items():
            if obj.type == 'file' and obj.extension:
                if obj.extension.lower() in extensions:
                    ret_paths.append(obj.path)
                    ret_objs.append(obj)
        if return_objs:
            return ret_objs
        else:
            return ret_paths

    def _format_directory(self, path:str):
        # Windows-style separators with a guaranteed trailing backslash.
        path = path.replace('/', '\\')
        if not path.endswith('\\'):
            path += '\\'
        return path
class InformationItem(Item):
    """Scraped user profile information.

    (Original docstring: "个人信息" -- Chinese for "personal
    information".)
    """

    _id = Field()                # unique user id
    NickName = Field()           # display name
    Gender = Field()
    Province = Field()
    City = Field()
    BriefIntroduction = Field()  # profile bio
    Authentication = Field()     # verification / badge info
    Num_Tweets = Field()
    Num_Follows = Field()
    Num_Fans = Field()
    URL = Field()                # profile URL
class ParticleFilter(object):
    """Implements a generic particle filter, easily extended to track a
    probability distribution such as the location of a robot.

    For further details, see the algorithm description in:
    S. Thrun et al., "Probabilistic Robotics", 1st Ed., p.98, Table 4.3

    Subclasses must override ``fwd_ctl_model`` and ``measurement_prob``.
    """

    def __init__(self, particle_type, state_gen, num_particles=100):
        self.particle_type = particle_type
        self.state_generator = state_gen
        self.M = num_particles
        # Structured array with an 'x' (state) and 'w' (weight) field
        # per particle; weights start out uniform.
        self.Y = N.zeros(self.M, self.particle_type)
        self.Y['w'] = 1.0 / self.M

    def setall(self, *args):
        """Initialise every particle's state to the given coordinates."""
        self.Y['x'] = args

    def run(self):
        """Generator yielding the resampled particle set once per step
        supplied by the state generator."""
        for dt, ctl_input, measurements in self.state_generator:
            for particle in self.Y:
                self.update_particle(particle, ctl_input, measurements, dt)
            self.resample()
            yield self.Y

    def update_particle(self, particle, ctl_input, measurements, dt):
        """Propagate one particle and reweight it by the measurements."""
        particle['x'] = self.fwd_ctl_model(ctl_input, particle['x'], dt)
        for ci, zi in measurements:
            particle['w'] *= self.measurement_prob(zi, particle['x'])

    def resample(self):
        """Importance resampling: draw M particles proportionally to
        weight, then reset all weights."""
        # Sort descending by weight, build the normalised CDF, and draw
        # M indices by inverse-CDF sampling.
        ordered = N.sort(self.Y[:], order='w')[::-1]
        cdf = N.cumsum(ordered['w'])
        cdf /= cdf[-1]
        picks = cdf.searchsorted(N.random.random(self.M))
        self.Y = ordered[picks]
        for particle in self.Y:
            particle['w'] = 1.0

    def fwd_ctl_model(self, ctl_input, state, dt):
        """Motion model hook: next state given control input and dt."""
        raise NotImplementedError("fwd_ctl_model not overridden!")

    def measurement_prob(self, measurement, state):
        """Sensor model hook: likelihood of *measurement* given state."""
        raise NotImplementedError("measurement_prob overridden!")
class BedPEEntry():
    """Paired-end BED (bedpe) entry: two intervals plus a name.

    Same as a bed except the less-than comparison doesn't sort by
    chromosome -- ordering is by the first mate's start coordinate.
    """

    def __init__(self, chrom1, start1, end1, chrom2, start2, end2, name, *args):
        self.chrom1 = chrom1
        self.start1 = start1
        self.end1 = end1
        self.chrom2 = chrom2
        self.start2 = start2
        self.end2 = end2
        self.name = name
        self.rest = args  # any extra columns, appended verbatim by __str__

    def plainStr(self):
        """The seven mandatory columns, tab separated."""
        return "%s\t%d\t%d\t%s\t%d\t%d\t%s" % (self.chrom1, self.start1,
                                               self.end1, self.chrom2,
                                               self.start2, self.end2,
                                               self.name)

    def __str__(self):
        extra = ""
        if len(self.rest) > 0:
            extra = "\t" + "\t".join([str(x) for x in self.rest])
        return self.plainStr() + extra

    def __lt__(self, other):
        # BUG FIX: the class has no ``start`` attribute, so the old
        # comparison raised AttributeError.  Compare by the first
        # mate's start coordinate (no chromosome, per the docstring).
        return self.start1 < other.start1

    def __gt__(self, other):
        return self.start1 > other.start1
class ListQueue: <NEW_LINE> <INDENT> def __init__(self, capacity): <NEW_LINE> <INDENT> self.list_queue = [None] * capacity <NEW_LINE> self._capacity = capacity <NEW_LINE> self.front = -1 <NEW_LINE> self.back = -1 <NEW_LINE> self.size = 0 <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> result = 'ListQueue[' <NEW_LINE> for val in self.list_queue: <NEW_LINE> <INDENT> result += ' ' + str(val) <NEW_LINE> <DEDENT> return result + ']' <NEW_LINE> <DEDENT> def insert(self, val): <NEW_LINE> <INDENT> if self.back is self._capacity - 1: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if self.front is -1: <NEW_LINE> <INDENT> self.front = 0 <NEW_LINE> <DEDENT> self.back += 1 <NEW_LINE> self.list_queue[self.back] = val <NEW_LINE> self.size += 1 <NEW_LINE> <DEDENT> def remove(self): <NEW_LINE> <INDENT> if self.size is 0: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.list_queue = self.list_queue[1:] + [None] <NEW_LINE> self.back -= 1 <NEW_LINE> if self.back is -1: <NEW_LINE> <INDENT> self.front = -1 <NEW_LINE> <DEDENT> self.size -= 1 <NEW_LINE> <DEDENT> def peek(self): <NEW_LINE> <INDENT> return self.list_queue[self.front] <NEW_LINE> <DEDENT> def capacity(self): <NEW_LINE> <INDENT> return self._capacity | This is a list based implementation of a queue which will keep oldest data
and drop anything new that there is not room for | 6259907299fddb7c1ca63a52 |
class HIDBSL5Base(bsl5.BSL5): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> bsl5.BSL5.__init__(self) <NEW_LINE> self.hid_device = None <NEW_LINE> self.logger = logging.getLogger('BSL5') <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> self.close() <NEW_LINE> <DEDENT> def bsl(self, cmd, message='', expect=None, receive_response=True): <NEW_LINE> <INDENT> print("[bsl()] Control entered bsl()...") <NEW_LINE> self.logger.debug('Command 0x%02x (%d bytes)' % (cmd, 1+len(message))) <NEW_LINE> print('[bsl()] Command 0x%02x (%d bytes)' % (cmd, 1+len(message))) <NEW_LINE> txdata = bytearray(struct.pack('<BBB', 0x3f, 1+len(message), cmd).decode("utf8") + message, encoding="utf8") <NEW_LINE> txdata += b'\xac'*(64 - len(txdata)) <NEW_LINE> print('Sending command: %r %d Bytes' % (txdata, len(txdata))) <NEW_LINE> self.write_report(txdata) <NEW_LINE> if receive_response: <NEW_LINE> <INDENT> self.logger.debug('Reading answer...') <NEW_LINE> print('Reading answer...') <NEW_LINE> report = self.read_report() <NEW_LINE> if sys.platform == 'darwin': <NEW_LINE> <INDENT> self.logger.debug('report = %r' % report) <NEW_LINE> print('report = %r' % report) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.logger.debug('report = %r' % report) <NEW_LINE> print('report = %r' % report) <NEW_LINE> <DEDENT> pi = report[0] <NEW_LINE> if pi == 0x3f: <NEW_LINE> <INDENT> length = report[1] <NEW_LINE> data = report[2:2+length] <NEW_LINE> return data <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if pi: raise bsl5.BSL5Error('received bad PI, expected 0x3f (got 0x%02x)' % (pi,)) <NEW_LINE> raise bsl5.BSL5Error('received bad PI, expected 0x3f (got empty response)') | Implementation of the BSL protocol over HID.
A subclass needs to implement open(), close(), read_report() and
write_report(). | 6259907238b623060ffaa4d3 |
class ArrayQueryParameter(AbstractQueryParameter): <NEW_LINE> <INDENT> def __init__(self, name, array_type, values): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.array_type = array_type <NEW_LINE> self.values = values <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def positional(cls, array_type, values): <NEW_LINE> <INDENT> return cls(None, array_type, values) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_api_repr(cls, resource): <NEW_LINE> <INDENT> name = resource.get('name') <NEW_LINE> array_type = resource['parameterType']['arrayType']['type'] <NEW_LINE> values = [ value['value'] for value in resource['parameterValue']['arrayValues']] <NEW_LINE> converted = [ _CELLDATA_FROM_JSON[array_type](value, None) for value in values] <NEW_LINE> return cls(name, array_type, converted) <NEW_LINE> <DEDENT> def to_api_repr(self): <NEW_LINE> <INDENT> values = self.values <NEW_LINE> converter = _SCALAR_VALUE_TO_JSON.get(self.array_type) <NEW_LINE> if converter is not None: <NEW_LINE> <INDENT> values = [converter(value) for value in values] <NEW_LINE> <DEDENT> resource = { 'parameterType': { 'type': 'ARRAY', 'arrayType': { 'type': self.array_type, }, }, 'parameterValue': { 'arrayValues': [{'value': value} for value in values], }, } <NEW_LINE> if self.name is not None: <NEW_LINE> <INDENT> resource['name'] = self.name <NEW_LINE> <DEDENT> return resource | Named / positional query parameters for array values.
:type name: str or None
:param name: Parameter name, used via ``@foo`` syntax. If None, the
paramter can only be addressed via position (``?``).
:type array_type: str
:param array_type:
name of type of array elements. One of `'STRING'`, `'INT64'`,
`'FLOAT64'`, `'BOOL'`, `'TIMESTAMP'`, or `'DATE'`.
:type values: list of appropriate scalar type.
:param values: the parameter array values. | 62599072796e427e53850076 |
class CacheWrapper(object): <NEW_LINE> <INDENT> def __init__(self, cache): <NEW_LINE> <INDENT> self._cache = cache <NEW_LINE> <DEDENT> def __call__(self, *args, **kwargs): <NEW_LINE> <INDENT> return self._cache.wrapper(*args, **kwargs) <NEW_LINE> <DEDENT> def expire_cache(self): <NEW_LINE> <INDENT> self._cache.expire_cache() | The cache wrapper | 625990721f037a2d8b9e54e9 |
class Video_multilevel_encoding(nn.Module): <NEW_LINE> <INDENT> def __init__(self, opt): <NEW_LINE> <INDENT> super(Video_multilevel_encoding, self).__init__() <NEW_LINE> self.rnn_output_size = opt.visual_rnn_size*2 <NEW_LINE> self.dropout = nn.Dropout(p=opt.dropout) <NEW_LINE> self.visual_norm = opt.visual_norm <NEW_LINE> self.concate = opt.concate <NEW_LINE> self.rnn = nn.GRU(opt.visual_feat_dim, opt.visual_rnn_size, batch_first=True, bidirectional=True) <NEW_LINE> self.convs1 = nn.ModuleList([ nn.Conv2d(1, opt.visual_kernel_num, (window_size, self.rnn_output_size), padding=(window_size - 1, 0)) for window_size in opt.visual_kernel_sizes ]) <NEW_LINE> self.visual_mapping = MFC(opt.visual_mapping_layers, opt.dropout, have_bn=True, have_last_bn=True) <NEW_LINE> <DEDENT> def forward(self, videos): <NEW_LINE> <INDENT> videos, videos_origin, lengths, vidoes_mask = videos <NEW_LINE> org_out = videos_origin <NEW_LINE> gru_init_out, _ = self.rnn(videos) <NEW_LINE> mean_gru = Variable(torch.zeros(gru_init_out.size(0), self.rnn_output_size)).cuda() <NEW_LINE> for i, batch in enumerate(gru_init_out): <NEW_LINE> <INDENT> mean_gru[i] = torch.mean(batch[:lengths[i]], 0) <NEW_LINE> <DEDENT> gru_out = mean_gru <NEW_LINE> gru_out = self.dropout(gru_out) <NEW_LINE> vidoes_mask = vidoes_mask.unsqueeze(2).expand(-1,-1,gru_init_out.size(2)) <NEW_LINE> gru_init_out = gru_init_out * vidoes_mask <NEW_LINE> con_out = gru_init_out.unsqueeze(1) <NEW_LINE> con_out = [F.relu(conv(con_out)).squeeze(3) for conv in self.convs1] <NEW_LINE> con_out = [F.max_pool1d(i, i.size(2)).squeeze(2) for i in con_out] <NEW_LINE> con_out = torch.cat(con_out, 1) <NEW_LINE> con_out = self.dropout(con_out) <NEW_LINE> if self.concate == 'full': <NEW_LINE> <INDENT> features = torch.cat((gru_out,con_out,org_out), 1) <NEW_LINE> <DEDENT> elif self.concate == 'reduced': <NEW_LINE> <INDENT> features = torch.cat((gru_out,con_out), 1) <NEW_LINE> <DEDENT> features = self.visual_mapping(features) <NEW_LINE> if 
self.visual_norm: <NEW_LINE> <INDENT> features = l2norm(features) <NEW_LINE> <DEDENT> return features <NEW_LINE> <DEDENT> def load_state_dict(self, state_dict): <NEW_LINE> <INDENT> own_state = self.state_dict() <NEW_LINE> new_state = OrderedDict() <NEW_LINE> for name, param in state_dict.items(): <NEW_LINE> <INDENT> if name in own_state: <NEW_LINE> <INDENT> new_state[name] = param <NEW_LINE> <DEDENT> <DEDENT> super(Video_multilevel_encoding, self).load_state_dict(new_state) | Section 3.1. Video-side Multi-level Encoding | 625990725fdd1c0f98e5f884 |
class DianpingCPUUsageest(CPUUsageTest): <NEW_LINE> <INDENT> def __init__(self, device_serial): <NEW_LINE> <INDENT> self.case_chinese_name = "CPU消耗测试" <NEW_LINE> self.package_name = "com.dianping.v1" <NEW_LINE> self.activity_name = ".NovaMainActivity" <NEW_LINE> self.device_serial = device_serial <NEW_LINE> super(DianpingCPUUsageest, self).__init__(self.package_name, self.activity_name, self.device_serial, self.case_chinese_name) <NEW_LINE> <DEDENT> def set_up(self): <NEW_LINE> <INDENT> self.robot.start_app() <NEW_LINE> time.sleep(5) <NEW_LINE> <DEDENT> def test(self): <NEW_LINE> <INDENT> for i in xrange(10): <NEW_LINE> <INDENT> self.robot.device.swipe(520, 1460, 520, 400) <NEW_LINE> time.sleep(1) <NEW_LINE> <DEDENT> <DEDENT> def tear_down(self): <NEW_LINE> <INDENT> self.robot.stop_app() | 测试点评CPU消耗 | 62599072435de62698e9d704 |
class MAVLink_storage_information_message(MAVLink_message): <NEW_LINE> <INDENT> id = MAVLINK_MSG_ID_STORAGE_INFORMATION <NEW_LINE> name = 'STORAGE_INFORMATION' <NEW_LINE> fieldnames = ['time_boot_ms', 'storage_id', 'storage_count', 'status', 'total_capacity', 'used_capacity', 'available_capacity', 'read_speed', 'write_speed'] <NEW_LINE> ordered_fieldnames = ['time_boot_ms', 'total_capacity', 'used_capacity', 'available_capacity', 'read_speed', 'write_speed', 'storage_id', 'storage_count', 'status'] <NEW_LINE> fieldtypes = ['uint32_t', 'uint8_t', 'uint8_t', 'uint8_t', 'float', 'float', 'float', 'float', 'float'] <NEW_LINE> format = '<IfffffBBB' <NEW_LINE> native_format = bytearray('<IfffffBBB', 'ascii') <NEW_LINE> orders = [0, 6, 7, 8, 1, 2, 3, 4, 5] <NEW_LINE> lengths = [1, 1, 1, 1, 1, 1, 1, 1, 1] <NEW_LINE> array_lengths = [0, 0, 0, 0, 0, 0, 0, 0, 0] <NEW_LINE> crc_extra = 179 <NEW_LINE> unpacker = struct.Struct('<IfffffBBB') <NEW_LINE> def __init__(self, time_boot_ms, storage_id, storage_count, status, total_capacity, used_capacity, available_capacity, read_speed, write_speed): <NEW_LINE> <INDENT> MAVLink_message.__init__(self, MAVLink_storage_information_message.id, MAVLink_storage_information_message.name) <NEW_LINE> self._fieldnames = MAVLink_storage_information_message.fieldnames <NEW_LINE> self.time_boot_ms = time_boot_ms <NEW_LINE> self.storage_id = storage_id <NEW_LINE> self.storage_count = storage_count <NEW_LINE> self.status = status <NEW_LINE> self.total_capacity = total_capacity <NEW_LINE> self.used_capacity = used_capacity <NEW_LINE> self.available_capacity = available_capacity <NEW_LINE> self.read_speed = read_speed <NEW_LINE> self.write_speed = write_speed <NEW_LINE> <DEDENT> def pack(self, mav, force_mavlink1=False): <NEW_LINE> <INDENT> return MAVLink_message.pack(self, mav, 179, struct.pack('<IfffffBBB', self.time_boot_ms, self.total_capacity, self.used_capacity, self.available_capacity, self.read_speed, self.write_speed, self.storage_id, 
self.storage_count, self.status), force_mavlink1=force_mavlink1) | Information about a storage medium. | 6259907291f36d47f2231b0d |
class V1QuobyteVolumeSource(object): <NEW_LINE> <INDENT> def __init__(self, volume=None, registry=None, user=None, group=None, read_only=None): <NEW_LINE> <INDENT> self.swagger_types = { 'volume': 'str', 'registry': 'str', 'user': 'str', 'group': 'str', 'read_only': 'bool' } <NEW_LINE> self.attribute_map = { 'volume': 'volume', 'registry': 'registry', 'user': 'user', 'group': 'group', 'read_only': 'readOnly' } <NEW_LINE> self._volume = volume <NEW_LINE> self._registry = registry <NEW_LINE> self._user = user <NEW_LINE> self._group = group <NEW_LINE> self._read_only = read_only <NEW_LINE> <DEDENT> @property <NEW_LINE> def volume(self): <NEW_LINE> <INDENT> return self._volume <NEW_LINE> <DEDENT> @volume.setter <NEW_LINE> def volume(self, volume): <NEW_LINE> <INDENT> self._volume = volume <NEW_LINE> <DEDENT> @property <NEW_LINE> def registry(self): <NEW_LINE> <INDENT> return self._registry <NEW_LINE> <DEDENT> @registry.setter <NEW_LINE> def registry(self, registry): <NEW_LINE> <INDENT> self._registry = registry <NEW_LINE> <DEDENT> @property <NEW_LINE> def user(self): <NEW_LINE> <INDENT> return self._user <NEW_LINE> <DEDENT> @user.setter <NEW_LINE> def user(self, user): <NEW_LINE> <INDENT> self._user = user <NEW_LINE> <DEDENT> @property <NEW_LINE> def group(self): <NEW_LINE> <INDENT> return self._group <NEW_LINE> <DEDENT> @group.setter <NEW_LINE> def group(self, group): <NEW_LINE> <INDENT> self._group = group <NEW_LINE> <DEDENT> @property <NEW_LINE> def read_only(self): <NEW_LINE> <INDENT> return self._read_only <NEW_LINE> <DEDENT> @read_only.setter <NEW_LINE> def read_only(self, read_only): <NEW_LINE> <INDENT> self._read_only = read_only <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> 
<DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 62599072627d3e7fe0e08785 |
class Test_NC_ABI_L1B_ir_cal(unittest.TestCase): <NEW_LINE> <INDENT> @mock.patch('satpy.readers.abi_l1b.xr') <NEW_LINE> def setUp(self, xr_): <NEW_LINE> <INDENT> from satpy.readers.abi_l1b import NC_ABI_L1B <NEW_LINE> rad_data = (np.arange(10.).reshape((2, 5)) + 1.) * 50. <NEW_LINE> rad_data = (rad_data + 1.) / 0.5 <NEW_LINE> rad_data = rad_data.astype(np.int16) <NEW_LINE> rad = xr.DataArray( rad_data, attrs={ 'scale_factor': 0.5, 'add_offset': -1., '_FillValue': 1002., }) <NEW_LINE> xr_.open_dataset.return_value = FakeDataset({ 'band_id': np.array(8), 'Rad': rad, "planck_fk1": np.array(13432.1), "planck_fk2": np.array(1497.61), "planck_bc1": np.array(0.09102), "planck_bc2": np.array(0.99971), "esun": np.array(2017), "nominal_satellite_subpoint_lat": np.array(0.0), "nominal_satellite_subpoint_lon": np.array(-89.5), "nominal_satellite_height": np.array(35786.02), "earth_sun_distance_anomaly_in_AU": np.array(0.99)}, {}) <NEW_LINE> self.reader = NC_ABI_L1B('filename', {'platform_shortname': 'G16'}, {'filetype': 'info'}) <NEW_LINE> <DEDENT> def test_ir_calibrate(self): <NEW_LINE> <INDENT> from satpy import DatasetID <NEW_LINE> res = self.reader.get_dataset( DatasetID(name='C05', calibration='brightness_temperature'), {}) <NEW_LINE> expected = np.array([[267.55572248, 305.15576503, 332.37383249, 354.73895301, 374.19710115], [391.68679226, 407.74064808, 422.69329105, 436.77021913, np.nan]]) <NEW_LINE> self.assertTrue(np.allclose(res.data, expected, equal_nan=True)) <NEW_LINE> self.assertIn('scale_factor', res.attrs) <NEW_LINE> self.assertIn('_FillValue', res.attrs) <NEW_LINE> self.assertEqual(res.attrs['standard_name'], 'toa_brightness_temperature') | Test the NC_ABI_L1B reader. | 62599072460517430c432cd6 |
class RegressionMethods(MlData): <NEW_LINE> <INDENT> def __init__(self, dataFrame: pd.DataFrame, y_column: str, x_columns: list): <NEW_LINE> <INDENT> super().__init__(dataFrame, y_column, x_columns) <NEW_LINE> <DEDENT> def linear_regression(self): <NEW_LINE> <INDENT> self.rm = linear_model.LinearRegression(fit_intercept=True, normalize=False, copy_X=True, n_jobs=-1) <NEW_LINE> self.rm.fit(self.x_values(), self.y_values()) <NEW_LINE> self.coef = pd.DataFrame({"Name": self.x_columns, "Coefficients": self.rm.coef_}) <NEW_LINE> self.intercept = self.rm.intercept_ <NEW_LINE> self.predict = lambda x: self.rm.predict(x) <NEW_LINE> <DEDENT> def svr(self): <NEW_LINE> <INDENT> self.rm = svm.SVR(kernel='rbf', C=1, gamma=0.1) <NEW_LINE> self.rm.fit(self.x_values(), self.y_values()) <NEW_LINE> self.predict = lambda x: self.rm.predict(x) <NEW_LINE> <DEDENT> def random_forest_regression(self): <NEW_LINE> <INDENT> self.rm = RandomForestRegressor(n_estimators=100, criterion="mse", max_features="auto", n_jobs=-1) <NEW_LINE> self.rm.fit(self.x_values(), self.y_values()) <NEW_LINE> features_importance_rank = np.argsort(self.rm.feature_importances_)[::-1] <NEW_LINE> features_importance_value = self.rm.feature_importances_[features_importance_rank] <NEW_LINE> features_importance_key = self.data[features_importance_rank].keys() <NEW_LINE> importance = pd.DataFrame( { "key": features_importance_key, "value": features_importance_value } ) <NEW_LINE> sns.barplot(x='value', y='key', data=importance) <NEW_LINE> plt.show() <NEW_LINE> <DEDENT> def analysis(self): <NEW_LINE> <INDENT> eq = self.y_column + "~" + "+".join(self.x_columns) <NEW_LINE> self.rm = smf.ols(formula=eq, data=self.data).fit() <NEW_LINE> print(self.rm.summary()) <NEW_LINE> tsa.adfuller(self.rm.resid, regression='nc') <NEW_LINE> autocorrelation_plot(self.rm.resid) <NEW_LINE> ACF_resid = tsa.acf(self.rm.resid) <NEW_LINE> VIF = pd.DataFrame( [oti.variance_inflation_factor(self.rm.model.exog, i) for i in range(1, 
self.rm.model.exog.shape[1])], index=self.rm.model.exog_names[1:], columns=['VIF']) | 線形重回帰分析 | 62599072283ffb24f3cf51a7 |
class CreateToolTip(object): <NEW_LINE> <INDENT> def __init__(self, widget, text='widget info'): <NEW_LINE> <INDENT> self.waittime = 500 <NEW_LINE> self.wraplength = 180 <NEW_LINE> self.widget = widget <NEW_LINE> self.text = text <NEW_LINE> self.widget.bind("<Enter>", self.enter) <NEW_LINE> self.widget.bind("<Leave>", self.leave) <NEW_LINE> self.widget.bind("<ButtonPress>", self.leave) <NEW_LINE> self.id = None <NEW_LINE> self.tw = None <NEW_LINE> <DEDENT> def enter(self, event=None): <NEW_LINE> <INDENT> self.schedule() <NEW_LINE> <DEDENT> def leave(self, event=None): <NEW_LINE> <INDENT> self.unschedule() <NEW_LINE> self.hidetip() <NEW_LINE> <DEDENT> def schedule(self): <NEW_LINE> <INDENT> self.unschedule() <NEW_LINE> self.id = self.widget.after(self.waittime, self.showtip) <NEW_LINE> <DEDENT> def unschedule(self): <NEW_LINE> <INDENT> id = self.id <NEW_LINE> self.id = None <NEW_LINE> if id: <NEW_LINE> <INDENT> self.widget.after_cancel(id) <NEW_LINE> <DEDENT> <DEDENT> def showtip(self, event=None): <NEW_LINE> <INDENT> x = y = 0 <NEW_LINE> x, y, cx, cy = self.widget.bbox("insert") <NEW_LINE> x += self.widget.winfo_rootx() + 25 <NEW_LINE> y += self.widget.winfo_rooty() + 20 <NEW_LINE> self.tw = tk.Toplevel(self.widget) <NEW_LINE> self.tw.wm_overrideredirect(True) <NEW_LINE> self.tw.wm_geometry("+%d+%d" % (x, y)) <NEW_LINE> label = tk.Label(self.tw, text=self.text, justify='left', background="#ffffb0", relief='solid', borderwidth=1, wraplength = self.wraplength) <NEW_LINE> label.pack(ipadx=1) <NEW_LINE> <DEDENT> def hidetip(self): <NEW_LINE> <INDENT> tw = self.tw <NEW_LINE> self.tw= None <NEW_LINE> if tw: <NEW_LINE> <INDENT> tw.destroy() | create a tooltip for a given widget | 625990725fc7496912d48ee8 |
class Section(Region): <NEW_LINE> <INDENT> _subsection_locator = (By.CSS_SELECTOR, '.subunit > li') <NEW_LINE> @property <NEW_LINE> def number(self) -> str: <NEW_LINE> <INDENT> line = self.root.text.split(". ", 1) <NEW_LINE> return line[0] if len(line) > 1 else "" <NEW_LINE> <DEDENT> @property <NEW_LINE> def subsections(self) -> List[TableOfContents.Section.Unit]: <NEW_LINE> <INDENT> return [self.Unit(self, option) for option in self.find_elements(*self._subsection_locator)] <NEW_LINE> <DEDENT> @property <NEW_LINE> def title(self) -> str: <NEW_LINE> <INDENT> return self.root.text.split(". ", 1)[-1] <NEW_LINE> <DEDENT> @property <NEW_LINE> def unnumbered(self) -> bool: <NEW_LINE> <INDENT> return self.number == "" <NEW_LINE> <DEDENT> class Unit(Region): <NEW_LINE> <INDENT> _section_link_locator = (By.CSS_SELECTOR, 'a') <NEW_LINE> @property <NEW_LINE> def number(self) -> str: <NEW_LINE> <INDENT> line = self.root.text.split(". ", 1) <NEW_LINE> return line[0] if bool(line) else "" <NEW_LINE> <DEDENT> @property <NEW_LINE> def title(self) -> str: <NEW_LINE> <INDENT> return self.find_element(*self._section_link_locator).text <NEW_LINE> <DEDENT> @property <NEW_LINE> def unnumbered(self) -> bool: <NEW_LINE> <INDENT> return self.number == "" <NEW_LINE> <DEDENT> @property <NEW_LINE> def url(self) -> str: <NEW_LINE> <INDENT> return (self.find_element(*self._section_link_locator) .get_attribute("href")) | A table of contents chapter or other primary content. | 625990727d847024c075dcd8 |
class MockFile(io.BytesIO, object): <NEW_LINE> <INDENT> def __init__(self, path, content=b""): <NEW_LINE> <INDENT> global mock_files <NEW_LINE> mock_files[path] = self <NEW_LINE> self._value_after_close = "" <NEW_LINE> self._super = super(MockFile, self) <NEW_LINE> self._super.__init__(content) <NEW_LINE> <DEDENT> def getvalue(self): <NEW_LINE> <INDENT> if not self.closed: <NEW_LINE> <INDENT> return self._super.getvalue() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self._value_after_close <NEW_LINE> <DEDENT> <DEDENT> def close(self): <NEW_LINE> <INDENT> if not self.closed: <NEW_LINE> <INDENT> self._value_after_close = self._super.getvalue() <NEW_LINE> <DEDENT> self._super.close() | Mock class for the file objects _contained in_ `FTPFile` objects
(not `FTPFile` objects themselves!).
Contrary to `StringIO.StringIO` instances, `MockFile` objects can
be queried for their contents after they have been closed. | 625990724428ac0f6e659e33 |
class Communicator(metaclass=ABCMeta): <NEW_LINE> <INDENT> pass <NEW_LINE> @abstractmethod <NEW_LINE> def sees(self, actor: Actor, previous_actor: Actor = None): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def no_longer_sees(self, actor: Actor): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def clear(self): <NEW_LINE> <INDENT> pass | Sends messages from the Vehicle to Actors. The medium to send the message is defined by subclass implementations. | 625990729c8ee82313040e06 |
class StoreLocation(glance_store.location.StoreLocation): <NEW_LINE> <INDENT> def process_specs(self): <NEW_LINE> <INDENT> self.scheme = self.specs.get('scheme', 'http') <NEW_LINE> self.netloc = self.specs['netloc'] <NEW_LINE> self.user = self.specs.get('user') <NEW_LINE> self.password = self.specs.get('password') <NEW_LINE> self.path = self.specs.get('path') <NEW_LINE> <DEDENT> def _get_credstring(self): <NEW_LINE> <INDENT> if self.user: <NEW_LINE> <INDENT> return '%s:%s@' % (self.user, self.password) <NEW_LINE> <DEDENT> return '' <NEW_LINE> <DEDENT> def get_uri(self): <NEW_LINE> <INDENT> return "%s://%s%s%s" % ( self.scheme, self._get_credstring(), self.netloc, self.path) <NEW_LINE> <DEDENT> def parse_uri(self, uri): <NEW_LINE> <INDENT> pieces = urlparse.urlparse(uri) <NEW_LINE> assert pieces.scheme in ('https', 'http') <NEW_LINE> self.scheme = pieces.scheme <NEW_LINE> netloc = pieces.netloc <NEW_LINE> path = pieces.path <NEW_LINE> try: <NEW_LINE> <INDENT> if '@' in netloc: <NEW_LINE> <INDENT> creds, netloc = netloc.split('@') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> creds = None <NEW_LINE> <DEDENT> <DEDENT> except ValueError: <NEW_LINE> <INDENT> if '@' in path: <NEW_LINE> <INDENT> creds, path = path.split('@') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> creds = None <NEW_LINE> <DEDENT> <DEDENT> if creds: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.user, self.password = creds.split(':') <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> reason = _("Credentials are not well-formatted.") <NEW_LINE> LOG.info(reason) <NEW_LINE> raise exceptions.BadStoreUri(message=reason) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.user = None <NEW_LINE> <DEDENT> if netloc == '': <NEW_LINE> <INDENT> LOG.info(_("No address specified in HTTP URL")) <NEW_LINE> raise exceptions.BadStoreUri(uri=uri) <NEW_LINE> <DEDENT> self.netloc = netloc <NEW_LINE> self.path = path | Class describing an HTTP(S) URI | 62599072baa26c4b54d50bab |
class LoggedInMixin: <NEW_LINE> <INDENT> def logged_in(self, obj: MagicLinkUse) -> bool: <NEW_LINE> <INDENT> return obj.timestamp == obj.link.logged_in_at <NEW_LINE> <DEDENT> logged_in.boolean = True <NEW_LINE> logged_in.short_description = "Used for login" | Mixin used to provide a logged_in method for display purposes. | 62599073f9cc0f698b1c5f4a |
class PlusGate(cirq.Gate): <NEW_LINE> <INDENT> def __init__(self, dimension, increment=1): <NEW_LINE> <INDENT> self.dimension = dimension <NEW_LINE> self.increment = increment % dimension <NEW_LINE> <DEDENT> def _qid_shape_(self): <NEW_LINE> <INDENT> return (self.dimension,) <NEW_LINE> <DEDENT> def _unitary_(self): <NEW_LINE> <INDENT> inc = (self.increment - 1) % self.dimension + 1 <NEW_LINE> u = np.empty((self.dimension, self.dimension)) <NEW_LINE> u[inc:] = np.eye(self.dimension)[:-inc] <NEW_LINE> u[:inc] = np.eye(self.dimension)[-inc:] <NEW_LINE> return u | A qudit gate that increments a qudit state mod its dimension. | 62599073d486a94d0ba2d8b8 |
class DataGenerator(keras.utils.Sequence): <NEW_LINE> <INDENT> def __init__(self, image_filenames_0, image_filenames_1, labels, batch_size): <NEW_LINE> <INDENT> self.image_filenames_0, self.image_filenames_1, self.labels = image_filenames_0, image_filenames_1, labels <NEW_LINE> self.batch_size = batch_size <NEW_LINE> self.is_mat = self.labels[0].endswith('mat') <NEW_LINE> self.max_idx = len(labels)//batch_size <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return self.max_idx <NEW_LINE> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> batch_x_0_names = self.image_filenames_0[index * self.batch_size: (index + 1) * self.batch_size] <NEW_LINE> batch_x_1_names = self.image_filenames_1[index * self.batch_size: (index + 1) * self.batch_size] <NEW_LINE> batch_y_names = self.labels[index * self.batch_size: (index + 1) * self.batch_size] <NEW_LINE> batch_x = np.zeros((self.batch_size, 128, 128, 2)) <NEW_LINE> batch_x[:, :, :, 0] = np.array([imread(file_name) for file_name in batch_x_0_names]) <NEW_LINE> batch_x[:, :, :, 1] = np.array([imread(file_name) for file_name in batch_x_1_names]) <NEW_LINE> batch_y = np.zeros((self.batch_size, 8)) <NEW_LINE> if self.is_mat: <NEW_LINE> <INDENT> for i in range(self.batch_size): <NEW_LINE> <INDENT> batch_y[i, :] = sio.loadmat(batch_y_names[i])['homography'].T <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> for i in range(self.batch_size): <NEW_LINE> <INDENT> file = open(batch_y_names[i]) <NEW_LINE> lines = file.readlines() <NEW_LINE> file.close() <NEW_LINE> ts = [] <NEW_LINE> for line in lines: <NEW_LINE> <INDENT> ts.append(float(line[:-1])) <NEW_LINE> <DEDENT> batch_y[i, :] = np.asarray(ts) <NEW_LINE> <DEDENT> <DEDENT> batch_x = np.divide(np.subtract(batch_x, 127.5), 127.5) <NEW_LINE> batch_y = np.divide(batch_y, 32) <NEW_LINE> return batch_x, batch_y | Generates data for Keras | 625990737d43ff2487428092 |
class TransformerDecoder(Module): <NEW_LINE> <INDENT> __constants__ = ['norm'] <NEW_LINE> def __init__(self, decoder_layer, num_layers, norm=None): <NEW_LINE> <INDENT> super(TransformerDecoder, self).__init__() <NEW_LINE> self.layers = _get_clones(decoder_layer, num_layers) <NEW_LINE> self.num_layers = num_layers <NEW_LINE> self.norm = norm <NEW_LINE> <DEDENT> def forward(self, tgt, memory, memory2=None, tgt_mask=None, memory_mask=None, tgt_key_padding_mask=None, memory_key_padding_mask=None): <NEW_LINE> <INDENT> output = tgt <NEW_LINE> for mod in self.layers: <NEW_LINE> <INDENT> output = mod(output, memory, memory2=memory2, tgt_mask=tgt_mask, memory_mask=memory_mask, tgt_key_padding_mask=tgt_key_padding_mask, memory_key_padding_mask=memory_key_padding_mask) <NEW_LINE> <DEDENT> if self.norm is not None: <NEW_LINE> <INDENT> output = self.norm(output) <NEW_LINE> <DEDENT> return output | TransformerDecoder is a stack of N decoder layers
Args:
decoder_layer: an instance of the TransformerDecoderLayer() class (required).
num_layers: the number of sub-decoder-layers in the decoder (required).
norm: the layer normalization component (optional).
Examples::
>>> decoder_layer = nn.TransformerDecoderLayer(d_model=512, nhead=8)
>>> transformer_decoder = nn.TransformerDecoder(decoder_layer, num_layers=6)
>>> memory = torch.rand(10, 32, 512)
>>> tgt = torch.rand(20, 32, 512)
>>> out = transformer_decoder(tgt, memory) | 62599073aad79263cf4300b5 |
class ToCollect(Process): <NEW_LINE> <INDENT> def run(self,mon1,mon2,tal1,tal2): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> yield hold,self,1 <NEW_LINE> mon1.observe(self.sim.now()) <NEW_LINE> mon2.observe(self.sim.now()) <NEW_LINE> tal1.observe(self.sim.now()) <NEW_LINE> tal2.observe(self.sim.now()) | For testing startCollection
| 6259907399cbb53fe68327e9 |
class NestedParameter(BaseParameter): <NEW_LINE> <INDENT> spec_attributes = [ "layout", "columns"] <NEW_LINE> type = 'nested' <NEW_LINE> layout = 'vertical' <NEW_LINE> fields = None <NEW_LINE> columns = None <NEW_LINE> def __init__(self, name, fields, **kwargs): <NEW_LINE> <INDENT> BaseParameter.__init__(self, fields=fields, name=name, **kwargs) <NEW_LINE> if self.columns is None: <NEW_LINE> <INDENT> num_visible_fields = len( [field for field in fields if not field.hide]) <NEW_LINE> if num_visible_fields >= 4: <NEW_LINE> <INDENT> self.columns = 2 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.columns = 1 <NEW_LINE> <DEDENT> <DEDENT> if self.columns > 4: <NEW_LINE> <INDENT> config.error( "UI only support up to 4 columns in nested parameters") <NEW_LINE> <DEDENT> self.setParent(None) <NEW_LINE> <DEDENT> def setParent(self, parent): <NEW_LINE> <INDENT> BaseParameter.setParent(self, parent) <NEW_LINE> for field in self.fields: <NEW_LINE> <INDENT> field.setParent(self) <NEW_LINE> <DEDENT> <DEDENT> @defer.inlineCallbacks <NEW_LINE> def collectChildProperties(self, kwargs, properties, collector, **kw): <NEW_LINE> <INDENT> childProperties = {} <NEW_LINE> for field in self.fields: <NEW_LINE> <INDENT> yield collector.collectValidationErrors(field.fullName, field.updateFromKwargs, kwargs=kwargs, properties=childProperties, collector=collector, **kw) <NEW_LINE> <DEDENT> kwargs[self.fullName] = childProperties <NEW_LINE> <DEDENT> @defer.inlineCallbacks <NEW_LINE> def updateFromKwargs(self, kwargs, properties, collector, **kw): <NEW_LINE> <INDENT> yield self.collectChildProperties(kwargs=kwargs, properties=properties, collector=collector, **kw) <NEW_LINE> if self.name: <NEW_LINE> <INDENT> d = properties.setdefault(self.name, {}) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> d = properties <NEW_LINE> <DEDENT> d.update(kwargs[self.fullName]) <NEW_LINE> <DEDENT> def getSpec(self): <NEW_LINE> <INDENT> ret = BaseParameter.getSpec(self) <NEW_LINE> ret['fields'] = 
sorted([field.getSpec() for field in self.fields], key=lambda x: x['name']) <NEW_LINE> return ret | A 'parent' parameter for a set of related parameters. This provides a
logical grouping for the child parameters.
Typically, the 'fullName' of the child parameters mix in the parent's
'fullName'. This allows for a field to appear multiple times in a form
(for example, two codebases each have a 'branch' field).
If the 'name' of the parent is the empty string, then the parent's name
does not mix in with the child 'fullName'. This is useful when a field
will not appear multiple time in a scheduler but the logical grouping is
helpful.
The result of a NestedParameter is typically a dictionary, with the key/value
being the name/value of the children. | 6259907363b5f9789fe86a63 |
class DriversMixin(object): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def get_available_drivers(): <NEW_LINE> <INDENT> return [driver['username'] for driver in db.drivers.find({"duty": True, "trip": False}, {'username': 1, '_id': 0})] <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_positions(drivers_names): <NEW_LINE> <INDENT> return db.positions.find({'username': {'$in': drivers_names}}, {'username': 1, 'latitude': 1, 'longitude': 1, '_id': 0}) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def distance(origin, destination): <NEW_LINE> <INDENT> lat1, lon1 = origin <NEW_LINE> lat2, lon2 = destination <NEW_LINE> radius = 6371 <NEW_LINE> dlat = math.radians(lat2 - lat1) <NEW_LINE> dlon = math.radians(lon2 - lon1) <NEW_LINE> aux = math.sin(dlat / 2) * math.sin(dlat / 2) + math.cos(math.radians(lat1)) * math.cos(math.radians(lat2)) * math.sin(dlon / 2) * math.sin( dlon / 2) <NEW_LINE> unscaled_distance = 2 * math.atan2(math.sqrt(aux), math.sqrt(1 - aux)) <NEW_LINE> final_distance = radius * unscaled_distance <NEW_LINE> return final_distance <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_closer_driver(location): <NEW_LINE> <INDENT> drivers_names = list(DriversMixin.get_available_drivers()) <NEW_LINE> drivers_with_position = DriversMixin.get_positions(drivers_names) <NEW_LINE> drivers_with_distance = [(driver['username'], DriversMixin.distance((driver['latitude'], driver['longitude']), location)) for driver in drivers_with_position] <NEW_LINE> driver = reduce(lambda x, y: x if x[1] < y[1] else y, drivers_with_distance, (None, float("inf"))) <NEW_LINE> return driver[0] | Utility class for anything related with drivers | 6259907376e4537e8c3f0e7e |
class StopTaskAction(TaskAction): <NEW_LINE> <INDENT> def run(self): <NEW_LINE> <INDENT> job = self.state.get_job(self.task) <NEW_LINE> if not job: <NEW_LINE> <INDENT> logger.error("%s: job does not exist" % self) <NEW_LINE> <DEDENT> elif not self.machine.stop_task(job): <NEW_LINE> <INDENT> logger.error("%s: failed to stop task" % self) | Stop a task. | 62599073ad47b63b2c5a914d |
class Kgate(Gate): <NEW_LINE> <INDENT> def __init__(self, kappa): <NEW_LINE> <INDENT> super().__init__([kappa]) <NEW_LINE> <DEDENT> def _apply(self, reg, backend, **kwargs): <NEW_LINE> <INDENT> p = _unwrap(self.p) <NEW_LINE> backend.kerr_interaction(p[0], *reg) | :ref:`Kerr <kerr>` gate.
.. math::
K(\kappa) = e^{i \kappa \hat{n}^2}
Args:
kappa (float): parameter | 62599073fff4ab517ebcf11a |
class File(CountableBase): <NEW_LINE> <INDENT> __tablename__ = 'file' <NEW_LINE> __table_args__ = {'sqlite_autoincrement': True} <NEW_LINE> id = Column(Integer, primary_key=True) <NEW_LINE> created_at = Column(Integer, default=0) <NEW_LINE> updated_at = Column(Integer, default=0) <NEW_LINE> path = Column(String(255)) <NEW_LINE> size = Column(Integer, default=0) <NEW_LINE> @classmethod <NEW_LINE> def find(cls, path): <NEW_LINE> <INDENT> return S.query(cls).filter_by(path=path) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def remove(cls, path): <NEW_LINE> <INDENT> S.query(cls).filter_by(path=path).delete() | File Schema
All recorded files | 625990733346ee7daa3382df |
class SwishBeta(Layer): <NEW_LINE> <INDENT> def __init__(self, beta=1, **kwargs): <NEW_LINE> <INDENT> super(Swish, self).__init__(**kwargs) <NEW_LINE> self.supports_masking = True <NEW_LINE> self.beta_initializer = K.cast_to_floatx(beta) <NEW_LINE> <DEDENT> def build(self, input_shape): <NEW_LINE> <INDENT> input_dim = input_shape[-1] <NEW_LINE> self.beta = self.add_weight((1,), initializer=self.beta_initializer, name='beta') <NEW_LINE> self.input_spec = InputSpec(dtype=K.floatx(), min_ndim=2, axes={-1: input_dim}) <NEW_LINE> self.build = True <NEW_LINE> <DEDENT> def call(self, inputs): <NEW_LINE> <INDENT> return inputs * K.sigmoid(self.beta*inputs) | Self-gated activation function with trainable beta
f(x) = x \sigma(eta x)
# References
Swish: a Self-Gated Activation Function https://arxiv.org/abs/1710.05941v1 | 625990734a966d76dd5f07ea |
class HomeTestCase(TestCase): <NEW_LINE> <INDENT> fixtures = ['sample_data.json'] <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> self.client = Client() <NEW_LINE> <DEDENT> def test_get_home_view_returns_success(self): <NEW_LINE> <INDENT> response = self.client.get( reverse('home') ) <NEW_LINE> self.assertEquals(response.status_code, 200) <NEW_LINE> <DEDENT> def test_home_redirects_to_dashboard_if_user_is_authenticated(self): <NEW_LINE> <INDENT> self.client.login(username='uzo', password='tia') <NEW_LINE> response = self.client.get(reverse('home')) <NEW_LINE> self.assertEquals(response.status_code, 302) | Testcase for the Home View . | 62599073a17c0f6771d5d82b |
class RecordFilter(object): <NEW_LINE> <INDENT> filter_param = '' <NEW_LINE> name_template = '' <NEW_LINE> def __init__(self, filter_value): <NEW_LINE> <INDENT> self.filter_value = filter_value <NEW_LINE> self.filter_name = self.name_template.format(self.filter_value) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.filter_value == other.filter_value <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return self.filter_value != other.filter_value <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash(self.filter_value) <NEW_LINE> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> if isinstance(self, type(other)): <NEW_LINE> <INDENT> return self.filter_value < other.filter_value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.filter_name < other.filter_name <NEW_LINE> <DEDENT> <DEDENT> def filter_records(self, record_list, search_term=None): <NEW_LINE> <INDENT> filtered_list = [] <NEW_LINE> for record in record_list: <NEW_LINE> <INDENT> if getattr(record, self.filter_param) == self.filter_value: <NEW_LINE> <INDENT> if search_term: <NEW_LINE> <INDENT> if record.has_term(search_term.lower()): <NEW_LINE> <INDENT> filtered_list.append(record) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> filtered_list.append(record) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return filtered_list | Superclass of all UsageRecord filters. Use the subclasses for each of
access to standard parameters.
Class Attributes:
RecordFilter.filter_param (str): This is the parameter name of
the UsageRecord object,
filtered by this filter
RecordFilter.name_template (str): This template is used to create
a human-readable name for the filter
Attributes:
filter_value (str): Any UsageRecord object that
object.filter_param == filter_value,
will pass the filter
filter_name (str): Human-readable name for this filter. | 62599073091ae35668706538 |
class TakeWhileEnumerable(Enumerable): <NEW_LINE> <INDENT> def __init__(self, enumerable, predicate): <NEW_LINE> <INDENT> super(TakeWhileEnumerable, self).__init__(enumerable) <NEW_LINE> self.predicate = predicate <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return itertools.takewhile(self.predicate, self._iterable) | Class to hold state for taking elements while a given predicate is true | 62599073442bda511e95d9d7 |
class FactBase: <NEW_LINE> <INDENT> pass | stores unary and binary relational facts | 62599073e76e3b2f99fda303 |
@registry.register_problem("wmt_enfr_tokens_8k") <NEW_LINE> class WMTEnFrTokens8k(WMTProblem): <NEW_LINE> <INDENT> @property <NEW_LINE> def targeted_vocab_size(self): <NEW_LINE> <INDENT> return 2**13 <NEW_LINE> <DEDENT> def train_generator(self, data_dir, tmp_dir, train): <NEW_LINE> <INDENT> symbolizer_vocab = generator_utils.get_or_generate_vocab( data_dir, tmp_dir, self.vocab_file, self.targeted_vocab_size) <NEW_LINE> datasets = _ENFR_TRAIN_DATASETS if train else _ENFR_TEST_DATASETS <NEW_LINE> tag = "train" if train else "dev" <NEW_LINE> data_path = _compile_data(tmp_dir, datasets, "wmt_enfr_tok_%s" % tag) <NEW_LINE> return token_generator(data_path + ".lang1", data_path + ".lang2", symbolizer_vocab, EOS) <NEW_LINE> <DEDENT> @property <NEW_LINE> def input_space_id(self): <NEW_LINE> <INDENT> return problem.SpaceID.EN_TOK <NEW_LINE> <DEDENT> @property <NEW_LINE> def target_space_id(self): <NEW_LINE> <INDENT> return problem.SpaceID.FR_TOK | Problem spec for WMT En-Fr translation. | 62599073f548e778e596ce8e |
class ComputeReservoir(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.enabled = True <NEW_LINE> self.checked = False <NEW_LINE> <DEDENT> def onClick(self): <NEW_LINE> <INDENT> pass | Implementation for PyFile_addin.ComputeReservoirButton (Button) | 62599073009cb60464d02e3a |
class deeds(object): <NEW_LINE> <INDENT> def __init__(self, owner=None, purchaseprice=0, mortgageValue=0, unmortage=0): <NEW_LINE> <INDENT> super(deeds, self).__init__() <NEW_LINE> self.m_owner = owner <NEW_LINE> self.m_purchasePrice = purchaseprice <NEW_LINE> self.m_mortgageValue = mortgageValue <NEW_LINE> self.m_unmortage = unmortage <NEW_LINE> <DEDENT> def showDeedInfo(self): <NEW_LINE> <INDENT> print("## Deed Info ##") <NEW_LINE> if not (self.m_owner is None): <NEW_LINE> <INDENT> print("Owner: " + str(self.m_owner)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("Not belong to anyone.") <NEW_LINE> <DEDENT> print("Purchase Price: " + str(self.m_purchasePrice)) <NEW_LINE> print("Mortgage Value: " + str(self.m_mortgageValue)) <NEW_LINE> print("Un-mortgage Price: " + str(self.m_unmortage)) | deeds of properties | 6259907355399d3f05627e19 |
class AclController(Controller): <NEW_LINE> <INDENT> def GET(self, req): <NEW_LINE> <INDENT> resp = req.get_response(self.app, method='HEAD') <NEW_LINE> return get_acl(req.access_key, resp.headers) <NEW_LINE> <DEDENT> def PUT(self, req): <NEW_LINE> <INDENT> if req.object_name: <NEW_LINE> <INDENT> raise S3NotImplemented() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> translated_acl = swift_acl_translate(req.body, xml=True) <NEW_LINE> if translated_acl == 'NotImplemented': <NEW_LINE> <INDENT> raise S3NotImplemented() <NEW_LINE> <DEDENT> elif translated_acl == 'InvalidArgument': <NEW_LINE> <INDENT> raise MalformedACLError() <NEW_LINE> <DEDENT> for header, acl in translated_acl: <NEW_LINE> <INDENT> req.headers[header] = acl <NEW_LINE> <DEDENT> resp = req.get_response(self.app) <NEW_LINE> resp.status = HTTP_OK <NEW_LINE> resp.headers.update({'Location': req.container_name}) <NEW_LINE> return resp | Handles the following APIs:
- GET Bucket acl
- PUT Bucket acl
- GET Object acl
- PUT Object acl
Those APIs are logged as ACL operations in the S3 server log. | 625990734e4d562566373d08 |
class Em(ReplaceTagNode): <NEW_LINE> <INDENT> verbose_name = 'Italic' <NEW_LINE> open_pattern = re.compile(patterns.no_argument % 'i', re.I) <NEW_LINE> close_pattern = re.compile(patterns.closing % 'i', re.I) | Makes text italic.
Usage:
[code lang=bbdocs linenos=0][i]Text[/i][/code] | 62599073167d2b6e312b8210 |
class LOOM_OT_render_terminal(bpy.types.Operator): <NEW_LINE> <INDENT> bl_idname = "loom.render_terminal" <NEW_LINE> bl_label = "Render Image Sequence in Terminal Instance" <NEW_LINE> bl_options = {'REGISTER', 'INTERNAL'} <NEW_LINE> frames: bpy.props.StringProperty( name="Frames", description="Specify a range or frames to render") <NEW_LINE> threads: bpy.props.IntProperty( name="CPU Threads", description="Number of CPU threads to use simultaneously while rendering", min = 1) <NEW_LINE> digits: bpy.props.IntProperty( name="Digits", description="Specify digits in filename", default=4) <NEW_LINE> isolate_numbers: bpy.props.BoolProperty( name="Filter Raw Items", description="Filter raw elements in frame input", default=False) <NEW_LINE> debug: bpy.props.BoolProperty( name="Debug Arguments", description="Print full argument list", default=False) <NEW_LINE> def determine_type(self, val): <NEW_LINE> <INDENT> if (isinstance(val, int)): <NEW_LINE> <INDENT> return ("chi") <NEW_LINE> <DEDENT> elif (isinstance(val, float)): <NEW_LINE> <INDENT> return ("chf") <NEW_LINE> <DEDENT> if val in ["true", "false"]: <NEW_LINE> <INDENT> return ("chb") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return ("chs") <NEW_LINE> <DEDENT> <DEDENT> def pack_arguments(self, lst): <NEW_LINE> <INDENT> return [{"idc": 0, "name": self.determine_type(i), "value": str(i)} for i in lst] <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def poll(cls, context): <NEW_LINE> <INDENT> return not context.scene.render.is_movie_format <NEW_LINE> <DEDENT> def execute(self, context): <NEW_LINE> <INDENT> prefs = context.preferences.addons[__name__].preferences <NEW_LINE> if bpy.data.is_dirty: <NEW_LINE> <INDENT> bpy.ops.wm.save_as_mainfile(filepath=bpy.data.filepath) <NEW_LINE> <DEDENT> python_expr = ("import bpy;" + "bpy.ops.render.image_sequence(" + "frames='{fns}', isolate_numbers={iel}," + "render_silent={cli}, digits={lzs})").format( fns=self.frames, iel=self.isolate_numbers, cli=True, lzs=self.digits) <NEW_LINE> 
cli_args = ["-b", bpy.data.filepath, "--python-expr", python_expr] <NEW_LINE> if self.properties.is_property_set("threads"): <NEW_LINE> <INDENT> cli_args = cli_args + ["-t", "{}".format(self.threads)] <NEW_LINE> <DEDENT> bpy.ops.loom.run_terminal( debug_arguments=self.debug, terminal_instance=True, argument_collection=self.pack_arguments(cli_args), bash_name="loom-render-temp", force_bash = prefs.bash_flag) <NEW_LINE> return {"FINISHED"} | Render image sequence in terminal instance | 6259907355399d3f05627e1a |
class TestInfoCollection(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.all_tests = [] <NEW_LINE> <DEDENT> def AddTests(self, test_infos): <NEW_LINE> <INDENT> self.all_tests = test_infos <NEW_LINE> <DEDENT> def GetAvailableTests(self, annotations, exclude_annotations, name_filter): <NEW_LINE> <INDENT> available_tests = self.all_tests <NEW_LINE> available_tests = [t for t in available_tests if self._AnnotationIncludesTest(t, annotations)] <NEW_LINE> if annotations and len(annotations) == 1 and annotations[0] == 'SmallTest': <NEW_LINE> <INDENT> tests_without_annotation = [ t for t in self.all_tests if not tests_annotations.AnnotatedFunctions.GetTestAnnotations( t.qualified_name)] <NEW_LINE> test_names = [t.qualified_name for t in tests_without_annotation] <NEW_LINE> logging.warning('The following tests do not contain any annotation. ' 'Assuming "SmallTest":\n%s', '\n'.join(test_names)) <NEW_LINE> available_tests += tests_without_annotation <NEW_LINE> <DEDENT> if exclude_annotations: <NEW_LINE> <INDENT> excluded_tests = [t for t in available_tests if self._AnnotationIncludesTest(t, exclude_annotations)] <NEW_LINE> available_tests = list(set(available_tests) - set(excluded_tests)) <NEW_LINE> <DEDENT> available_tests = [t for t in available_tests if self._NameFilterIncludesTest(t, name_filter)] <NEW_LINE> return available_tests <NEW_LINE> <DEDENT> def _AnnotationIncludesTest(self, test_info, annotation_filter_list): <NEW_LINE> <INDENT> if not annotation_filter_list: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> for annotation_filter in annotation_filter_list: <NEW_LINE> <INDENT> filters = annotation_filter.split('=') <NEW_LINE> if len(filters) == 2: <NEW_LINE> <INDENT> key = filters[0] <NEW_LINE> value_list = filters[1].split(',') <NEW_LINE> for value in value_list: <NEW_LINE> <INDENT> if tests_annotations.AnnotatedFunctions.IsAnnotated( key + ':' + value, test_info.qualified_name): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> 
<DEDENT> <DEDENT> elif tests_annotations.AnnotatedFunctions.IsAnnotated( annotation_filter, test_info.qualified_name): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> def _NameFilterIncludesTest(self, test_info, name_filter): <NEW_LINE> <INDENT> return not name_filter or name_filter in test_info.qualified_name | A collection of TestInfo objects which facilitates filtering. | 6259907399fddb7c1ca63a54 |
class CanceledError(AlluxioError): <NEW_LINE> <INDENT> def __init__(self, message): <NEW_LINE> <INDENT> super(CanceledError, self).__init__(Status.CANCELED, message) | Exception indicating that an operation was cancelled (typically by the caller).
Args:
message (str): The error message. | 62599073ad47b63b2c5a914f |
class Condition: <NEW_LINE> <INDENT> __sql = None <NEW_LINE> __vals = None <NEW_LINE> def __init__(self, _condition): <NEW_LINE> <INDENT> sql_s = ana_condition_sql(_condition, '') <NEW_LINE> if sql_s is None: <NEW_LINE> <INDENT> self.__sql = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> sql_len = len(sql_s) <NEW_LINE> if sql_len <= 2: <NEW_LINE> <INDENT> self.__sql = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if sql_s.startswith('('): <NEW_LINE> <INDENT> sql_s = sql_s[1:sql_len - 1] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.__sql = sql_s <NEW_LINE> self.__vals = ana_condition_val(_condition, []) <NEW_LINE> <DEDENT> def get_sql(self): <NEW_LINE> <INDENT> return self.__sql <NEW_LINE> <DEDENT> def get_vals(self): <NEW_LINE> <INDENT> return self.__vals | 数据查询条件 | 62599073fff4ab517ebcf11c |
class DeliveryColissimoBlok(Blok): <NEW_LINE> <INDENT> version = version <NEW_LINE> author = "Franck BRET" <NEW_LINE> required = ['delivery'] <NEW_LINE> @classmethod <NEW_LINE> def import_declaration_module(cls): <NEW_LINE> <INDENT> import_declaration_module() <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def reload_declaration_module(cls, reload): <NEW_LINE> <INDENT> import_declaration_module(reload=reload) <NEW_LINE> <DEDENT> def update_colissimo(self): <NEW_LINE> <INDENT> ca = self.registry.Delivery.Carrier.insert( name="Colissimo", code="COLISSIMO") <NEW_LINE> ca_cred = self.registry.Delivery.Carrier.Credential.insert() <NEW_LINE> for (name, product_code) in SERVICES: <NEW_LINE> <INDENT> self.registry.Delivery.Carrier.Service.Colissimo.insert( name=name, product_code=product_code, carrier=ca, credential=ca_cred) <NEW_LINE> <DEDENT> <DEDENT> def update(self, latest): <NEW_LINE> <INDENT> if latest is None: <NEW_LINE> <INDENT> self.update_colissimo() | Delivery blok
| 6259907392d797404e3897dc |
class BackupWarn(ConfigError): <NEW_LINE> <INDENT> pass | Backup warning. | 62599073aad79263cf4300b8 |
class IUserManualType(interface.Interface): <NEW_LINE> <INDENT> pass | user manual content type | 625990732c8b7c6e89bd50e9 |
class DeleteReferencesItem(FrozenClass): <NEW_LINE> <INDENT> def __init__(self, binary=None): <NEW_LINE> <INDENT> if binary is not None: <NEW_LINE> <INDENT> self._binary_init(binary) <NEW_LINE> self._freeze = True <NEW_LINE> return <NEW_LINE> <DEDENT> self.SourceNodeId = NodeId() <NEW_LINE> self.ReferenceTypeId = NodeId() <NEW_LINE> self.IsForward = True <NEW_LINE> self.TargetNodeId = ExpandedNodeId() <NEW_LINE> self.DeleteBidirectional = True <NEW_LINE> self._freeze = True <NEW_LINE> <DEDENT> def to_binary(self): <NEW_LINE> <INDENT> packet = [] <NEW_LINE> packet.append(self.SourceNodeId.to_binary()) <NEW_LINE> packet.append(self.ReferenceTypeId.to_binary()) <NEW_LINE> packet.append(uatype_Boolean.pack(self.IsForward)) <NEW_LINE> packet.append(self.TargetNodeId.to_binary()) <NEW_LINE> packet.append(uatype_Boolean.pack(self.DeleteBidirectional)) <NEW_LINE> return b''.join(packet) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def from_binary(data): <NEW_LINE> <INDENT> return DeleteReferencesItem(data) <NEW_LINE> <DEDENT> def _binary_init(self, data): <NEW_LINE> <INDENT> self.SourceNodeId = NodeId.from_binary(data) <NEW_LINE> self.ReferenceTypeId = NodeId.from_binary(data) <NEW_LINE> self.IsForward = uatype_Boolean.unpack(data.read(1))[0] <NEW_LINE> self.TargetNodeId = ExpandedNodeId.from_binary(data) <NEW_LINE> self.DeleteBidirectional = uatype_Boolean.unpack(data.read(1))[0] <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'DeleteReferencesItem(' + 'SourceNodeId:' + str(self.SourceNodeId) + ', ' + 'ReferenceTypeId:' + str(self.ReferenceTypeId) + ', ' + 'IsForward:' + str(self.IsForward) + ', ' + 'TargetNodeId:' + str(self.TargetNodeId) + ', ' + 'DeleteBidirectional:' + str(self.DeleteBidirectional) + ')' <NEW_LINE> <DEDENT> __repr__ = __str__ | A request to delete a node from the server address space.
:ivar SourceNodeId:
:vartype SourceNodeId: NodeId
:ivar ReferenceTypeId:
:vartype ReferenceTypeId: NodeId
:ivar IsForward:
:vartype IsForward: Boolean
:ivar TargetNodeId:
:vartype TargetNodeId: ExpandedNodeId
:ivar DeleteBidirectional:
:vartype DeleteBidirectional: Boolean | 625990737c178a314d78e86c |
class Fetcher(object): <NEW_LINE> <INDENT> def fetch(self, client, method, url, data=None, headers=None, json=True): <NEW_LINE> <INDENT> raise NotImplemented | Base class for Fetchers, which wrap and normalize the APIs of various HTTP
libraries.
(It's a slightly leaky abstraction designed to make testing easier.) | 62599073d268445f2663a7de |
class StreetLeader(models.Model): <NEW_LINE> <INDENT> clap_level = models.OneToOneField( ClapLevel, on_delete=models.CASCADE, verbose_name=_('nivel clap') ) <NEW_LINE> profile = models.OneToOneField( Profile, on_delete=models.CASCADE, verbose_name=_('perfil') ) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return '%s %s' % (self.profile.user.first_name, self.profile.user.last_name) <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = _('Lider de Calle') <NEW_LINE> verbose_name_plural = _('Líderes de Calle') | !
Clase que gestiona el perfil de los usuarios que pertenecen al nivel Líder de Calle
@author William Páez (wpaez at cenditel.gob.ve)
@copyright <a href='http://www.gnu.org/licenses/gpl-2.0.html'>GNU Public License versión 2 (GPLv2)</a> | 625990738e7ae83300eea993 |
class TestStringParameter2b(SimulationTest): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> SimulationTest.setup_class_base('StringTests.mo', 'StringTests.TestStringParameterScalar1', version="2.0") <NEW_LINE> <DEDENT> @testattr(stddist_full = True) <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> self.setup_base() <NEW_LINE> <DEDENT> @testattr(stddist_full = True) <NEW_LINE> def test_trajectories(self): <NEW_LINE> <INDENT> assertString(self.model, "pi", "string") <NEW_LINE> assertString(self.model, "pd", "string1") <NEW_LINE> setString(self.model, "pi", "something") <NEW_LINE> assertString(self.model, "pi", "something") <NEW_LINE> assertString(self.model, "pd", "something1") <NEW_LINE> setString(self.model, "pi", "somethingelse") <NEW_LINE> self.run() <NEW_LINE> assertString(self.model, "pi", "somethingelse") <NEW_LINE> assertString(self.model, "pd", "somethingelse1") | Basic test of string parameter. FMI2.0. | 62599073e5267d203ee6d03e |
@python_2_unicode_compatible <NEW_LINE> class BaseLink(models.Model, TagSearchable): <NEW_LINE> <INDENT> LINK_TYPE_EXTERNAL = 1 <NEW_LINE> LINK_TYPE_EMAIL = 2 <NEW_LINE> created_at = models.DateTimeField(auto_now_add=True) <NEW_LINE> link_type = models.PositiveIntegerField(_('Link Type'), blank=True) <NEW_LINE> title = models.CharField(_('Title'), max_length=100, help_text=_('Enter a title for this link')) <NEW_LINE> email = models.EmailField(_('Email'), blank=True, help_text=_('Enter a valid email address')) <NEW_LINE> external_url = models.URLField(_('URL'), blank=True, help_text=_('Enter a valid URL, including scheme (e.g. http://)')) <NEW_LINE> tags = TaggableManager(help_text=None, blank=True, verbose_name=_('Tags')) <NEW_LINE> objects = LinkQuerySet.as_manager() <NEW_LINE> class Meta(object): <NEW_LINE> <INDENT> abstract = True <NEW_LINE> verbose_name = _('Link') <NEW_LINE> verbose_name_plural = _('Links') <NEW_LINE> ordering = ('title',) <NEW_LINE> <DEDENT> search_fields = ( index.SearchField('title', partial_match=True, boost=10), index.SearchField('get_tags', partial_match=True, boost=10), index.SearchField('external_url'), index.SearchField('email'), index.FilterField('link_type'), ) <NEW_LINE> @property <NEW_LINE> def url(self): <NEW_LINE> <INDENT> url = None <NEW_LINE> if self.link_type == self.LINK_TYPE_EMAIL: <NEW_LINE> <INDENT> url = self.email <NEW_LINE> <DEDENT> elif self.link_type == self.LINK_TYPE_EXTERNAL: <NEW_LINE> <INDENT> url = self.external_url <NEW_LINE> <DEDENT> return url <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '{0}'.format(self.title) <NEW_LINE> <DEDENT> def get_absolute_url(self): <NEW_LINE> <INDENT> url = None <NEW_LINE> if self.link_type == self.LINK_TYPE_EMAIL: <NEW_LINE> <INDENT> url = 'mailto:{0}'.format(self.email) <NEW_LINE> <DEDENT> elif self.link_type == self.LINK_TYPE_EXTERNAL: <NEW_LINE> <INDENT> url = self.external_url <NEW_LINE> <DEDENT> return url <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def 
edit_handler(cls): <NEW_LINE> <INDENT> return ObjectList(cls.content_panels) | Abstract base class that stores either a URL or an email address. | 62599073a17c0f6771d5d82c |
class Relationship(Base): <NEW_LINE> <INDENT> __tablename__ = RELATIONSHIP_TABLE_NAME <NEW_LINE> id = Column(Integer, primary_key=True) <NEW_LINE> up_regulated = Column(Boolean, nullable=False, index=True, doc='up-regulation or down-regulation') <NEW_LINE> description = Column(String(255), nullable=False, doc='This is a manually curated relationship') <NEW_LINE> mirna_id = Column(Integer, ForeignKey('{}.id'.format(MIRNA_TABLE_NAME))) <NEW_LINE> mirna = relationship('MiRNA') <NEW_LINE> disease_id = Column(Integer, ForeignKey('{}.id'.format(DISEASE_TABLE_NAME))) <NEW_LINE> disease = relationship('Disease') <NEW_LINE> def add_to_bel_graph(self, graph): <NEW_LINE> <INDENT> graph.add_qualified_edge( self.mirna.as_pybel(), self.disease.as_pybel(), relation=(POSITIVE_CORRELATION if self.up_regulated else NEGATIVE_CORRELATION), evidence=str(self.description), citation='18927107' ) | This class represents the miRNA disease relationship table | 62599073bf627c535bcb2dcf |
class LiquorViewItem(QTreeWidgetItem): <NEW_LINE> <INDENT> def __init__(self, ID: int, name: str, maker: str, category: str, sub_category: str, sub_sub_category: str, sub_sub_sub_category: str, cost: float, volume: float, alcohol_by_volume: float, aroma: str, color: str, origin: str, region: str): <NEW_LINE> <INDENT> self._id = ID <NEW_LINE> self._name = name <NEW_LINE> self._category = category <NEW_LINE> self._maker = maker <NEW_LINE> self._sub_category = sub_category <NEW_LINE> self._sub_sub_category = sub_sub_category <NEW_LINE> self._sub_sub_sub_category = sub_sub_sub_category <NEW_LINE> self._cost = cost <NEW_LINE> self._volume = volume <NEW_LINE> self._alcohol_by_volume = alcohol_by_volume <NEW_LINE> self._aroma = aroma <NEW_LINE> self._color = color <NEW_LINE> self._origin = origin <NEW_LINE> self._region = region <NEW_LINE> super(QTreeWidgetItem, self).__init__() <NEW_LINE> <DEDENT> def get_id(self): <NEW_LINE> <INDENT> return self._id <NEW_LINE> <DEDENT> def get_name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> def get_maker(self): <NEW_LINE> <INDENT> return self._maker <NEW_LINE> <DEDENT> def get_category(self): <NEW_LINE> <INDENT> return self._category <NEW_LINE> <DEDENT> def get_sub_category(self): <NEW_LINE> <INDENT> return self._sub_category <NEW_LINE> <DEDENT> def get_sub_sub_category(self): <NEW_LINE> <INDENT> return self._sub_sub_category <NEW_LINE> <DEDENT> def get_sub_sub_sub_category(self): <NEW_LINE> <INDENT> return self._sub_sub_sub_category <NEW_LINE> <DEDENT> def get_cost(self): <NEW_LINE> <INDENT> return self._cost <NEW_LINE> <DEDENT> def get_volume(self): <NEW_LINE> <INDENT> return self._volume <NEW_LINE> <DEDENT> def get_abv(self): <NEW_LINE> <INDENT> return self._alcohol_by_volume <NEW_LINE> <DEDENT> def get_aroma(self): <NEW_LINE> <INDENT> return self._aroma <NEW_LINE> <DEDENT> def get_color(self): <NEW_LINE> <INDENT> return self._color <NEW_LINE> <DEDENT> def get_origin(self): <NEW_LINE> <INDENT> return 
self._origin <NEW_LINE> <DEDENT> def get_region(self): <NEW_LINE> <INDENT> return self._region <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "ID: {}, NAME: {}".format(self._id, self._name) | "Alcohol" class of which is displayed in the LiquorView item
Arguments:
QTreeWidgetItem {QTreeWidgetItem} -- Inherits from QTreeViewItem | 625990732c8b7c6e89bd50ea |
class IntField(BaseField): <NEW_LINE> <INDENT> def __init__(self, min_value=None, max_value=None, **kwargs): <NEW_LINE> <INDENT> self.min_value, self.max_value = min_value, max_value <NEW_LINE> super(IntField, self).__init__(**kwargs) <NEW_LINE> <DEDENT> def to_python(self, value): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> value = int(value) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return value <NEW_LINE> <DEDENT> def validate(self, value): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> value = int(value) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> self.error('%s could not be converted to int' % value) <NEW_LINE> <DEDENT> if self.min_value is not None and value < self.min_value: <NEW_LINE> <INDENT> self.error('Integer value is too small') <NEW_LINE> <DEDENT> if self.max_value is not None and value > self.max_value: <NEW_LINE> <INDENT> self.error('Integer value is too large') <NEW_LINE> <DEDENT> <DEDENT> def prepare_query_value(self, op, value): <NEW_LINE> <INDENT> if value is None: <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> return super(IntField, self).prepare_query_value(op, int(value)) | 32-bit integer field. | 62599073009cb60464d02e3c |
class HeatBase(object): <NEW_LINE> <INDENT> __table_args__ = {'mysql_engine': 'InnoDB'} <NEW_LINE> __table_initialized__ = False <NEW_LINE> created_at = Column(DateTime, default=timeutils.utcnow) <NEW_LINE> updated_at = Column(DateTime, onupdate=timeutils.utcnow) <NEW_LINE> def save(self, session=None): <NEW_LINE> <INDENT> if not session: <NEW_LINE> <INDENT> session = Session.object_session(self) <NEW_LINE> if not session: <NEW_LINE> <INDENT> session = get_session() <NEW_LINE> <DEDENT> <DEDENT> session.add(self) <NEW_LINE> try: <NEW_LINE> <INDENT> session.flush() <NEW_LINE> <DEDENT> except IntegrityError as e: <NEW_LINE> <INDENT> if str(e).endswith('is not unique'): <NEW_LINE> <INDENT> raise exception.Duplicate(str(e)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def expire(self, session=None, attrs=None): <NEW_LINE> <INDENT> if not session: <NEW_LINE> <INDENT> session = Session.object_session(self) <NEW_LINE> if not session: <NEW_LINE> <INDENT> session = get_session() <NEW_LINE> <DEDENT> <DEDENT> session.expire(self, attrs) <NEW_LINE> <DEDENT> def refresh(self, session=None, attrs=None): <NEW_LINE> <INDENT> if not session: <NEW_LINE> <INDENT> session = Session.object_session(self) <NEW_LINE> if not session: <NEW_LINE> <INDENT> session = get_session() <NEW_LINE> <DEDENT> <DEDENT> session.refresh(self, attrs) <NEW_LINE> <DEDENT> def delete(self, session=None): <NEW_LINE> <INDENT> self.deleted = True <NEW_LINE> self.deleted_at = timeutils.utcnow() <NEW_LINE> if not session: <NEW_LINE> <INDENT> session = Session.object_session(self) <NEW_LINE> if not session: <NEW_LINE> <INDENT> session = get_session() <NEW_LINE> <DEDENT> <DEDENT> session.delete(self) <NEW_LINE> session.flush() <NEW_LINE> <DEDENT> def __setitem__(self, key, value): <NEW_LINE> <INDENT> setattr(self, key, value) <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> return getattr(self, key) <NEW_LINE> <DEDENT> def get(self, key, default=None): 
<NEW_LINE> <INDENT> return getattr(self, key, default) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> self._i = iter(object_mapper(self).columns) <NEW_LINE> return self <NEW_LINE> <DEDENT> def next(self): <NEW_LINE> <INDENT> n = self._i.next().name <NEW_LINE> return n, getattr(self, n) <NEW_LINE> <DEDENT> def update(self, values): <NEW_LINE> <INDENT> for k, v in values.iteritems(): <NEW_LINE> <INDENT> setattr(self, k, v) <NEW_LINE> <DEDENT> <DEDENT> def update_and_save(self, values, session=None): <NEW_LINE> <INDENT> if not session: <NEW_LINE> <INDENT> session = Session.object_session(self) <NEW_LINE> if not session: <NEW_LINE> <INDENT> session = get_session() <NEW_LINE> <DEDENT> <DEDENT> session.begin() <NEW_LINE> for k, v in values.iteritems(): <NEW_LINE> <INDENT> setattr(self, k, v) <NEW_LINE> <DEDENT> session.commit() <NEW_LINE> <DEDENT> def iteritems(self): <NEW_LINE> <INDENT> local = dict(self) <NEW_LINE> joined = dict([(k, v) for k, v in self.__dict__.iteritems() if not k[0] == '_']) <NEW_LINE> local.update(joined) <NEW_LINE> return local.iteritems() | Base class for Heat Models. | 62599073f9cc0f698b1c5f4c |
class ErrorMsgTest(UnitTest): <NEW_LINE> <INDENT> def test_error_message(self): <NEW_LINE> <INDENT> PyQtAccounts.ErrorWindow.exec = lambda *args: PyQtAccounts.QMessageBox.Ok <NEW_LINE> def mock_Window(): <NEW_LINE> <INDENT> raise Exception("Error message!") <NEW_LINE> <DEDENT> self.monkeypatch.setattr("PyQtAccounts.Window", mock_Window) <NEW_LINE> msg = PyQtAccounts.main() <NEW_LINE> self.assertEqual( "Помилка!", msg.windowTitle(), "Error window title is incorrect!" ) <NEW_LINE> self.assertEqual( "Вибачте програма повинна припинити роботу через помилку.", msg.text(), "Error window message is incorrect!", ) <NEW_LINE> self.assertEqual( "Error message!", msg.detailedText(), "Error window details are incorrect!" ) | This class tests PyQtAccounts error messages. | 625990737d43ff2487428094 |
class Content(models.Model): <NEW_LINE> <INDENT> content = HtmlField("Content", widget_rows=40, blank=True) <NEW_LINE> search_fields = ("content",) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> abstract = True | Provides a HTML field for managing general content and making it searchable. | 6259907367a9b606de547725 |
class _zone(BaseCommand): <NEW_LINE> <INDENT> SHORT_HELP = "Create, retrieve, update, and delete zone SOA data" <NEW_LINE> def run(self, args): <NEW_LINE> <INDENT> self._zoneAPI = self.nsone.zones() <NEW_LINE> self._zone = args['ZONE'] <NEW_LINE> if args['list']: <NEW_LINE> <INDENT> self.list() <NEW_LINE> <DEDENT> elif args['info']: <NEW_LINE> <INDENT> self.info() <NEW_LINE> <DEDENT> elif args['delete']: <NEW_LINE> <INDENT> self.delete(args) <NEW_LINE> <DEDENT> elif args['create']: <NEW_LINE> <INDENT> self.create(args) <NEW_LINE> <DEDENT> <DEDENT> def _printZoneModel(self, zdata): <NEW_LINE> <INDENT> if not self.isTextFormat(): <NEW_LINE> <INDENT> self.jsonOut(zdata) <NEW_LINE> return <NEW_LINE> <DEDENT> records = zdata.pop('records') <NEW_LINE> self.ppText(zdata) <NEW_LINE> if len(records) == 0: <NEW_LINE> <INDENT> self.out('NO RECORDS') <NEW_LINE> return <NEW_LINE> <DEDENT> self.out('RECORDS:') <NEW_LINE> longestRec = self._longest([r['domain'] for r in records]) <NEW_LINE> for r in records: <NEW_LINE> <INDENT> self.out(' %s %s %s' % (r['domain'].ljust(longestRec), r['type'].ljust(5), ', '.join(r['short_answers']))) <NEW_LINE> <DEDENT> <DEDENT> def create(self, args): <NEW_LINE> <INDENT> zdata = self._zoneAPI.create(self._zone, refresh=args['--refresh'], retry=args['--retry'], expiry=args['--expiry'], nx_ttl=args['--nx_ttl']) <NEW_LINE> self._printZoneModel(zdata) <NEW_LINE> <DEDENT> def delete(self, args): <NEW_LINE> <INDENT> self.checkWriteLock(args) <NEW_LINE> self._zoneAPI.delete(self._zone) <NEW_LINE> <DEDENT> def info(self): <NEW_LINE> <INDENT> zdata = self._zoneAPI.retrieve(self._zone) <NEW_LINE> self._printZoneModel(zdata) <NEW_LINE> <DEDENT> def list(self): <NEW_LINE> <INDENT> zlist = self._zoneAPI.list() <NEW_LINE> if self.isTextFormat(): <NEW_LINE> <INDENT> for z in zlist: <NEW_LINE> <INDENT> if not self._zone or (self._zone and z['zone'] == self._zone): <NEW_LINE> <INDENT> self.out(z['zone']) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> 
<INDENT> if self._zone: <NEW_LINE> <INDENT> for z in zlist: <NEW_LINE> <INDENT> if self._zone and z['zone'] == self._zone: <NEW_LINE> <INDENT> self.jsonOut([z]) <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.jsonOut(zlist) | usage: ns1 zone list
ns1 zone info ZONE
ns1 zone create ZONE [options]
ns1 zone delete [-f] ZONE
Options:
--refresh N SOA Refresh
--retry N SOA Retry
--expiry N SOA Expiry
--nx_ttl N SOA NX TTL
-f Force: override the write lock if one exists
Zone Actions:
list List all active zones
info Get zone details
create Create a new zone
delete Delete a zone and all records it contains | 625990733317a56b869bf1c6 |
class svc: <NEW_LINE> <INDENT> def __init__(self, service_name, service_type): <NEW_LINE> <INDENT> self.service_name = service_name <NEW_LINE> self.service_type = service_type <NEW_LINE> self.proxy = None <NEW_LINE> self.connect() <NEW_LINE> <DEDENT> def connect(self, action = "connect"): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> rospy.wait_for_service(self.service_name, timeout = rospy.Duration.from_sec(2)) <NEW_LINE> <DEDENT> except rospy.ROSInterruptException: <NEW_LINE> <INDENT> rospy.logerr("Interrupted waiting for required service [%s] on %s", self.service_name, action) <NEW_LINE> raise <NEW_LINE> <DEDENT> except rospy.ROSException: <NEW_LINE> <INDENT> rospy.logerr("Timeout waiting for required service [%s] on %s", self.service_name, action) <NEW_LINE> raise <NEW_LINE> <DEDENT> self.proxy = rospy.ServiceProxy(self.service_name, self.service_type) <NEW_LINE> <DEDENT> def __call__(self, *args, **kwargs): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> res = self.proxy(*args) <NEW_LINE> <DEDENT> except rospy.service.ServiceException as e: <NEW_LINE> <INDENT> rospy.logerr("Failed to call %s service, attempting to reconnect: %s", self.service_name, e) <NEW_LINE> self.proxy = None <NEW_LINE> for attempt in range(3): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.connect(action = "reconnect") <NEW_LINE> if self.proxy is not None: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> except rospy.ROSInterruptException as e: <NEW_LINE> <INDENT> rospy.logerr("Interrupted reconnecting to service [%s]: %s", self.service_name, e) <NEW_LINE> break <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> rospy.logerr("Failed to reconnect to %s service (attempt %s)", self.service_name, attempt + 1) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if self.proxy is None: <NEW_LINE> <INDENT> rospy.logfatal("Failed to reconnect to %s service, node will exit", self.service_name) <NEW_LINE> rospy.signal_shutdown("Failed to reconnect to {} service, node will exit".format(self.service_name)) 
<NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> res = self.proxy(*args) <NEW_LINE> <DEDENT> return res | Wrapper class for a ROS service to allow for reconnection
Currently doesn't cleanly recognise the difference between ^C having
caused a problem with the proxy call or a failure with the underlying
TCP connection | 625990738a43f66fc4bf3a98 |
class SolverFilter(admin.SimpleListFilter): <NEW_LINE> <INDENT> title = 'resuelto por' <NEW_LINE> parameter_name = 'resolver' <NEW_LINE> def lookups(self, request, model_admin): <NEW_LINE> <INDENT> return ( ('me', 'Resueltos por mi'), ) <NEW_LINE> <DEDENT> def queryset(self, request, queryset): <NEW_LINE> <INDENT> if self.value() == 'me': <NEW_LINE> <INDENT> return queryset.filter(solver = request.user) | Para filtrar las AdminTask que ha resuelto el usuario que lo
solicita | 625990734e4d562566373d0a |
class PresetMonochromePianoRoll(BasePreset): <NEW_LINE> <INDENT> def first_load(self, score): <NEW_LINE> <INDENT> self.lowest_pitch, self.highest_pitch = util.get_edge_pitches(score) <NEW_LINE> self.viz_manager.screen.fill((0, 0, 255)) <NEW_LINE> <DEDENT> def per_note_on(self, screen, viz_note): <NEW_LINE> <INDENT> note = viz_note.note <NEW_LINE> screen_x = self.viz_manager.main_frame.display.size.x <NEW_LINE> screen_y = self.viz_manager.main_frame.display.size.y <NEW_LINE> y = util.graph_note_y(note, self.highest_pitch, self.lowest_pitch, screen_y) <NEW_LINE> color = colorhelper.midi_to_monochrome(viz_note.note.pitch.midi) <NEW_LINE> note_rect = unit.RectNoteUnit(0, 0, color, note, 200, 20) <NEW_LINE> note_rect = util.create_unit_in_center_of_quadrant(note_rect, (0, 0), (screen_x, screen_y)) <NEW_LINE> note_rect.y = y <NEW_LINE> self.viz_manager.units.append(note_rect) <NEW_LINE> <DEDENT> def per_note_off(self, screen, message): <NEW_LINE> <INDENT> self.viz_manager.remove_unit(message.note) | Similar to PianoRoll, but in black-white monochrome. | 625990735fdd1c0f98e5f887 |
class Job(ProxyResource): <NEW_LINE> <INDENT> _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, 'created': {'readonly': True}, 'state': {'readonly': True}, 'input': {'required': True}, 'last_modified': {'readonly': True}, 'outputs': {'required': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'created': {'key': 'properties.created', 'type': 'iso-8601'}, 'state': {'key': 'properties.state', 'type': 'JobState'}, 'description': {'key': 'properties.description', 'type': 'str'}, 'input': {'key': 'properties.input', 'type': 'JobInput'}, 'last_modified': {'key': 'properties.lastModified', 'type': 'iso-8601'}, 'outputs': {'key': 'properties.outputs', 'type': '[JobOutput]'}, 'priority': {'key': 'properties.priority', 'type': 'Priority'}, } <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(Job, self).__init__(**kwargs) <NEW_LINE> self.created = None <NEW_LINE> self.state = None <NEW_LINE> self.description = kwargs.get('description', None) <NEW_LINE> self.input = kwargs.get('input', None) <NEW_LINE> self.last_modified = None <NEW_LINE> self.outputs = kwargs.get('outputs', None) <NEW_LINE> self.priority = kwargs.get('priority', None) | A Job resource type. The progress and state can be obtained by polling a
Job or subscribing to events using EventGrid.
Variables are only populated by the server, and will be ignored when
sending a request.
All required parameters must be populated in order to send to Azure.
:ivar id: Fully qualified resource ID for the resource.
:vartype id: str
:ivar name: The name of the resource.
:vartype name: str
:ivar type: The type of the resource.
:vartype type: str
:ivar created: The UTC date and time when the Job was created, in
'YYYY-MM-DDThh:mm:ssZ' format.
:vartype created: datetime
:ivar state: The current state of the job. Possible values include:
'Canceled', 'Canceling', 'Error', 'Finished', 'Processing', 'Queued',
'Scheduled'
:vartype state: str or ~azure.mgmt.media.models.JobState
:param description: Optional customer supplied description of the Job.
:type description: str
:param input: Required. The inputs for the Job.
:type input: ~azure.mgmt.media.models.JobInput
:ivar last_modified: The UTC date and time when the Job was last updated,
in 'YYYY-MM-DDThh:mm:ssZ' format.
:vartype last_modified: datetime
:param outputs: Required. The outputs for the Job.
:type outputs: list[~azure.mgmt.media.models.JobOutput]
:param priority: Priority with which the job should be processed. Higher
priority jobs are processed before lower priority jobs. If not set, the
default is normal. Possible values include: 'Low', 'Normal', 'High'
:type priority: str or ~azure.mgmt.media.models.Priority | 6259907332920d7e50bc794b |
class Test(unittest.TestCase): <NEW_LINE> <INDENT> __test__ = False <NEW_LINE> def __init__(self, test, config=None): <NEW_LINE> <INDENT> if not hasattr(test, '__call__'): <NEW_LINE> <INDENT> raise TypeError("Test called with argument %r that " "is not callable. A callable is required." % test) <NEW_LINE> <DEDENT> self.test = test <NEW_LINE> if config is None: <NEW_LINE> <INDENT> config = Config() <NEW_LINE> <DEDENT> self.config = config <NEW_LINE> unittest.TestCase.__init__(self) <NEW_LINE> <DEDENT> def __call__(self, *arg, **kwarg): <NEW_LINE> <INDENT> return self.run(*arg, **kwarg) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return str(self.test) <NEW_LINE> <DEDENT> def _context(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.test.context <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> return self.test.__class__ <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> return resolve_name(self.test.__module__) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> context = property(_context, None, None, """Get the context object of this test.""") <NEW_LINE> def run(self, result): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.runTest(result) <NEW_LINE> <DEDENT> except KeyboardInterrupt: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> err = sys.exc_info() <NEW_LINE> result.addError(self, err) <NEW_LINE> <DEDENT> <DEDENT> def runTest(self, result): <NEW_LINE> <INDENT> test = self.test <NEW_LINE> test(result) | The universal test case wrapper.
| 625990735fc7496912d48eeb |
class Camera: <NEW_LINE> <INDENT> scale_factor = 1.0 <NEW_LINE> track_body = None <NEW_LINE> def __init__(self, parent): <NEW_LINE> <INDENT> self.parent = parent <NEW_LINE> <DEDENT> def track(self, body): <NEW_LINE> <INDENT> self.track_body = body <NEW_LINE> <DEDENT> def track_stop(self): <NEW_LINE> <INDENT> self.track_body = None <NEW_LINE> <DEDENT> def center(self, pos, screenCoord=True, stopTrack=True): <NEW_LINE> <INDENT> x, y = pos <NEW_LINE> x -= self.parent.display_width / 2 <NEW_LINE> y -= self.parent.display_height / 2 <NEW_LINE> if screenCoord: <NEW_LINE> <INDENT> x /= self.scale_factor <NEW_LINE> y /= self.scale_factor <NEW_LINE> <DEDENT> self.inc_offset((x, y), screenCoord, stopTrack) <NEW_LINE> <DEDENT> def set_offset(self, offset, screenCoord=True, stopTrack=True): <NEW_LINE> <INDENT> if stopTrack: <NEW_LINE> <INDENT> self.track_stop() <NEW_LINE> <DEDENT> if screenCoord: <NEW_LINE> <INDENT> x, y = self.parent.to_world(offset) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> x, y = offset <NEW_LINE> <DEDENT> self._set_offset((x/self.parent.ppm, y/self.parent.ppm)) <NEW_LINE> <DEDENT> def inc_offset(self, offset, screenCoord=True, stopTrack=True): <NEW_LINE> <INDENT> if stopTrack: <NEW_LINE> <INDENT> self.track_stop() <NEW_LINE> <DEDENT> x, y = self.parent.screen_offset_pixel <NEW_LINE> dx, dy = offset <NEW_LINE> if screenCoord: <NEW_LINE> <INDENT> if self.parent.inputAxis_x_left: <NEW_LINE> <INDENT> dx *= -1 <NEW_LINE> <DEDENT> if self.parent.inputAxis_y_down: <NEW_LINE> <INDENT> dy *= -1 <NEW_LINE> <DEDENT> <DEDENT> self._set_offset(((x + dx) / self.parent.ppm, (y + dy) / self.parent.ppm)) <NEW_LINE> <DEDENT> def _set_offset(self, offset): <NEW_LINE> <INDENT> x, y = offset <NEW_LINE> self.parent.screen_offset = (x, y) <NEW_LINE> self.parent.screen_offset_pixel = (x * self.parent.ppm, y * self.parent.ppm) <NEW_LINE> <DEDENT> def set_scale_factor(self, factor=1.0): <NEW_LINE> <INDENT> self.scale_factor = factor <NEW_LINE> <DEDENT> def 
inc_scale_factor(self, factor=0.0): <NEW_LINE> <INDENT> self.scale_factor += factor | The Camera class. We will see :)
Please also see: http://www.assembla.com/spaces/elements/tickets/31
This class currently handles:
- Scaling factor
- Screen Offset from the World Coordinate System
Inputs from the user have to be checked for them.
- Places to check for it: elements.py, drawing.py, add_objects.py | 6259907323849d37ff8529bb |
class Asset(models.Model): <NEW_LINE> <INDENT> asset_type_choices = ( ('server', '服务器'), ('networkdevice', '网络设备'), ('storagedevice', '存储设备'), ('securitydevice', '安全设备'), ('software', '软件资产'), ) <NEW_LINE> asset_status_choices = ( (0, '在线'), (1, '下线'), (2, '未知'), (3, '故障'), (4, '备用'), ) <NEW_LINE> asset_type = models.CharField(choices=asset_type_choices, max_length=64, default='server', verbose_name="资产类型") <NEW_LINE> name = models.CharField(max_length=64, unique=True, verbose_name="资产名称") <NEW_LINE> sn = models.CharField(max_length=128, unique=True, verbose_name="资产序列号") <NEW_LINE> business_unit = models.ForeignKey('BusinessUnit', null=True, blank=True, verbose_name='所属业务线', on_delete=models.SET_NULL) <NEW_LINE> status = models.SmallIntegerField(choices=asset_status_choices, default=0, verbose_name='设备状态') <NEW_LINE> manufacturer = models.ForeignKey('Manufacturer', null=True, blank=True, verbose_name='制造商', on_delete=models.SET_NULL) <NEW_LINE> manage_ip = models.GenericIPAddressField(null=True, blank=True, verbose_name='管理IP') <NEW_LINE> tags = models.ManyToManyField('Tag', blank=True, verbose_name='标签') <NEW_LINE> admin = models.ForeignKey(User, null=True, blank=True, verbose_name='资产管理员', related_name='admin', on_delete=models.SET_NULL) <NEW_LINE> idc = models.ForeignKey('IDC', null=True, blank=True, verbose_name='所在机房', on_delete=models.SET_NULL) <NEW_LINE> contract = models.ForeignKey('Contract', null=True, blank=True, verbose_name='合同', on_delete=models.SET_NULL) <NEW_LINE> purchase_day = models.DateField(null=True, blank=True, verbose_name="购买日期") <NEW_LINE> expire_day = models.DateField(null=True, blank=True, verbose_name="过保日期") <NEW_LINE> price = models.FloatField(null=True, blank=True, verbose_name="价格") <NEW_LINE> approved_by = models.ForeignKey(User, null=True, blank=True, verbose_name='批准人', related_name='approved_by', on_delete=models.SET_NULL) <NEW_LINE> memo = models.TextField(null=True, blank=True, verbose_name='备注') <NEW_LINE> c_time = 
models.DateTimeField(auto_now_add=True, verbose_name='批准日期') <NEW_LINE> m_time = models.DateTimeField(auto_now=True, verbose_name='更新日期') <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return '<%s> %s' % (self.get_asset_type_display(), self.name) <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = '资产总表' <NEW_LINE> verbose_name_plural = "资产总表" <NEW_LINE> ordering = ['-c_time'] | 所有资产的共有数据表 | 6259907356b00c62f0fb41d4 |
class DataDefsView(DictKeyValueView): <NEW_LINE> <INDENT> column_names = ["name", "defn"] <NEW_LINE> mapstring = 1 <NEW_LINE> def relbind(self, db, atts): <NEW_LINE> <INDENT> self.dict = db.datadefs <NEW_LINE> return self | Data defs (of non-special views) and definition dumps. | 625990739c8ee82313040e09 |
class HashAuth(Auth): <NEW_LINE> <INDENT> code: str | :class:`fastapi_gen.oauth2.HashToken` 所用认证字段 | 62599073a219f33f346c810f |
class SaClmCallbacksT(Structure): <NEW_LINE> <INDENT> _fields_ = [('saClmClusterNodeGetCallback', SaClmClusterNodeGetCallbackT), ('saClmClusterTrackCallback', SaClmClusterTrackCallbackT)] | Contain various callbacks CLM service may invoke on registrant.
| 62599073fff4ab517ebcf11f |
class BASEVENUSTECH(BASETELNET): <NEW_LINE> <INDENT> pass | This is a manufacturer of venustech, using the
telnet version of the protocol, so it is integrated with BASETELNET library. | 62599073f9cc0f698b1c5f4d |
class SmarterEncoder(jsonutils.json.JSONEncoder): <NEW_LINE> <INDENT> def default(self, obj): <NEW_LINE> <INDENT> if not isinstance(obj, dict) and hasattr(obj, 'iteritems'): <NEW_LINE> <INDENT> return dict(obj.iteritems()) <NEW_LINE> <DEDENT> return super(SmarterEncoder, self).default(obj) | Help for JSON encoding dict-like objects. | 62599073aad79263cf4300bb |
class RenderGirlBlender(bpy.types.RenderEngine): <NEW_LINE> <INDENT> bl_idname = 'RenderGirl' <NEW_LINE> bl_label = 'RenderGirl' <NEW_LINE> bl_use_preview = True <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> RenderGirl.instance.session = self <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> RenderGirl.instance.session = None <NEW_LINE> <DEDENT> def render(self, scene): <NEW_LINE> <INDENT> scale = scene.render.resolution_percentage / 100.0 <NEW_LINE> size_x = int(scene.render.resolution_x * scale) <NEW_LINE> size_y = int(scene.render.resolution_y * scale) <NEW_LINE> pixel_count = size_x * size_y <NEW_LINE> objects = scene.objects <NEW_LINE> MeshPlus = collections.namedtuple('MeshPlus', ['mesh','name','position','scale','rotation']) <NEW_LINE> meshs = [] <NEW_LINE> for i in range(len(objects)): <NEW_LINE> <INDENT> if objects[i].type == 'MESH': <NEW_LINE> <INDENT> tri_modifier = objects[i].modifiers.new( name="triangulate",type='TRIANGULATE') <NEW_LINE> mesh = objects[i].to_mesh(scene,True,'RENDER') <NEW_LINE> rot = Vector() <NEW_LINE> euler = objects[i].matrix_world.to_euler('XZY') <NEW_LINE> rot.x = euler.x <NEW_LINE> rot.y = euler.z <NEW_LINE> rot.z = euler.y <NEW_LINE> pos = objects[i].matrix_world.translation.xzy <NEW_LINE> mesh_tuple = MeshPlus(mesh,objects[i].name, pos,objects[i].scale.xzy, rot) <NEW_LINE> meshs.append(mesh_tuple) <NEW_LINE> objects[i].modifiers.remove(tri_modifier) <NEW_LINE> <DEDENT> <DEDENT> for i in range(len(meshs)): <NEW_LINE> <INDENT> ret = RenderGirl.instance.add_scene_group(meshs[i]) <NEW_LINE> if ret != 0: <NEW_LINE> <INDENT> raise ValueError("Error adding object {1} to RenderGirl" .format(meshs[i].name)) <NEW_LINE> <DEDENT> <DEDENT> light = None <NEW_LINE> for i in range(len(objects)): <NEW_LINE> <INDENT> if objects[i].type == 'LAMP': <NEW_LINE> <INDENT> light = objects[i] <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> rect = RenderGirl.instance.render(size_x, size_y, scene.camera, light) <NEW_LINE> if rect == None: 
<NEW_LINE> <INDENT> RenderGirl.instance.clear_scene() <NEW_LINE> raise ValueError("Error rendering frame, please check the logs") <NEW_LINE> <DEDENT> result = self.begin_result(0, 0, size_x, size_y) <NEW_LINE> layer = result.layers[0] <NEW_LINE> layer.passes[0].rect = rect <NEW_LINE> self.end_result(result) <NEW_LINE> RenderGirl.instance.clear_scene() | This class provides interface between blender UI and the RenderGirl Core | 6259907326068e7796d4e241 |
class TorrentCache: <NEW_LINE> <INDENT> def infohash_urls(self, info_hash): <NEW_LINE> <INDENT> urls = [host + info_hash.upper() + '.torrent' for host in MIRRORS] <NEW_LINE> random.shuffle(urls) <NEW_LINE> return urls <NEW_LINE> <DEDENT> @plugin.priority(120) <NEW_LINE> def on_task_urlrewrite(self, task, config): <NEW_LINE> <INDENT> for entry in task.accepted: <NEW_LINE> <INDENT> info_hash = None <NEW_LINE> if entry['url'].startswith('magnet:'): <NEW_LINE> <INDENT> info_hash_search = re.search('btih:([0-9a-f]+)', entry['url'], re.IGNORECASE) <NEW_LINE> if info_hash_search: <NEW_LINE> <INDENT> info_hash = info_hash_search.group(1) <NEW_LINE> <DEDENT> <DEDENT> elif entry.get('torrent_info_hash'): <NEW_LINE> <INDENT> info_hash = entry['torrent_info_hash'] <NEW_LINE> <DEDENT> if info_hash: <NEW_LINE> <INDENT> entry.setdefault('urls', [entry['url']]) <NEW_LINE> urls = set(host + info_hash.upper() + '.torrent' for host in MIRRORS) <NEW_LINE> urls = list(urls - set(entry['urls'])) <NEW_LINE> random.shuffle(urls) <NEW_LINE> entry['urls'].extend(urls) | Adds urls to torrent cache sites to the urls list. | 6259907399fddb7c1ca63a56 |
class SangerParser(VariantParser): <NEW_LINE> <INDENT> def __init__(self, vcf_filename, tumor_sample=None): <NEW_LINE> <INDENT> self._vcf_filename = vcf_filename <NEW_LINE> self._tumor_sample = tumor_sample <NEW_LINE> <DEDENT> def _find_ref_and_variant_nt(self, variant): <NEW_LINE> <INDENT> assert len(variant.REF) == len(variant.ALT) == 1 <NEW_LINE> return (str(variant.REF[0]), str(variant.ALT[0])) <NEW_LINE> <DEDENT> def _calc_read_counts(self, variant): <NEW_LINE> <INDENT> normal = variant.genotype('NORMAL') <NEW_LINE> tumor = variant.genotype('TUMOUR') <NEW_LINE> reference_nt, variant_nt = self._find_ref_and_variant_nt(variant) <NEW_LINE> tumor_reads = { 'forward': { 'A': int(tumor['FAZ']), 'C': int(tumor['FCZ']), 'G': int(tumor['FGZ']), 'T': int(tumor['FTZ']), }, 'reverse': { 'A': int(tumor['RAZ']), 'C': int(tumor['RCZ']), 'G': int(tumor['RGZ']), 'T': int(tumor['RTZ']), }, } <NEW_LINE> ref_reads = tumor_reads['forward'][reference_nt] + tumor_reads['reverse'][reference_nt] <NEW_LINE> variant_reads = tumor_reads['forward'][variant_nt] + tumor_reads['reverse'][variant_nt] <NEW_LINE> total_reads = ref_reads + variant_reads <NEW_LINE> return (ref_reads, total_reads) | Works with PCAWG variant calls from the Sanger. | 62599073ec188e330fdfa1aa |
class Stage: <NEW_LINE> <INDENT> def __init__(self, word, prior = None): <NEW_LINE> <INDENT> self.word = word <NEW_LINE> self.prior = prior <NEW_LINE> <DEDENT> def collectTrail(self): <NEW_LINE> <INDENT> trail = [] <NEW_LINE> node = self <NEW_LINE> while node: <NEW_LINE> <INDENT> trail.insert(0, node.word) <NEW_LINE> node = node.prior <NEW_LINE> <DEDENT> return trail | A Stage in the word ladder, recording prior word (which defaults to None). | 62599073167d2b6e312b8212 |
class ResBlock(nn.Module): <NEW_LINE> <INDENT> def __init__(self, kernel=3, num_feats=64, padding=1, bias=True, res_scale=1): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> layers = [] <NEW_LINE> layers += [nn.Conv2d(in_channels=num_feats, out_channels=num_feats, kernel_size=kernel, padding=padding, bias=bias)] <NEW_LINE> layers += [nn.ReLU(inplace=True)] <NEW_LINE> layers += [nn.Conv2d(in_channels=num_feats, out_channels=num_feats, kernel_size=kernel, padding=padding, bias=bias)] <NEW_LINE> self.conv = nn.Sequential(*layers) <NEW_LINE> self.res_scale = res_scale <NEW_LINE> layers = [nn.Conv2d(in_channels=num_feats, out_channels=num_feats, kernel_size=kernel, padding=padding, bias=bias)] <NEW_LINE> self.conv_last = nn.Sequential(*layers) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> res = self.conv(x).mul(self.res_scale) <NEW_LINE> return self.conv_last(x + res) | Residual Block | 62599073e1aae11d1e7cf490 |
class GetTopTagsInputSet(InputSet): <NEW_LINE> <INDENT> def set_APIKey(self, value): <NEW_LINE> <INDENT> super(GetTopTagsInputSet, self)._set_input('APIKey', value) <NEW_LINE> <DEDENT> def set_Limit(self, value): <NEW_LINE> <INDENT> super(GetTopTagsInputSet, self)._set_input('Limit', value) <NEW_LINE> <DEDENT> def set_User(self, value): <NEW_LINE> <INDENT> super(GetTopTagsInputSet, self)._set_input('User', value) | An InputSet with methods appropriate for specifying the inputs to the GetTopTags
Choreo. The InputSet object is used to specify input parameters when executing this Choreo. | 6259907316aa5153ce401dde |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.