code (string, 4–4.48k chars) | docstring (string, 1–6.45k chars) | _id (string, 24 chars) |
---|---|---|
class Hls(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'fragments_per_ts_segment': {'key': 'fragmentsPerTsSegment', 'type': 'int'}, } <NEW_LINE> def __init__( self, *, fragments_per_ts_segment: Optional[int] = None, **kwargs ): <NEW_LINE> <INDENT> super(Hls, self).__init__(**kwargs) <NEW_LINE> self.fragments_per_ts_segment = fragments_per_ts_segment | HTTP Live Streaming (HLS) packing setting for the live output.
:param fragments_per_ts_segment: The number of fragments in an HTTP Live Streaming (HLS) TS
segment in the output of the live event. This value does not affect the packing ratio for HLS
CMAF output.
:type fragments_per_ts_segment: int | 6259905607d97122c4218214 |
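A minimal usage sketch for the `Hls` row above; it assumes the msrest-based Azure Media Services SDK that defines this model is importable:

```python
# Sketch only: Hls is the msrest model from the row above.
hls = Hls(fragments_per_ts_segment=5)
assert hls.fragments_per_ts_segment == 5
# msrest models serialize through _attribute_map, so the REST-side name is
# the camelCase key: hls.serialize() -> {'fragmentsPerTsSegment': 5}
```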
class ApiDocsMalhas(ApiDocsService): <NEW_LINE> <INDENT> @property <NEW_LINE> def produces(self): <NEW_LINE> <INDENT> return ["image/svg+xml", "application/vnd.geo+json", "application/json"] <NEW_LINE> <DEDENT> def _paths_responses(self, operation_id): <NEW_LINE> <INDENT> if operation_id == "idGet": <NEW_LINE> <INDENT> return {"description": "Malha renderizada"} <NEW_LINE> <DEDENT> raise | Special case for /api/v2/malhas
It's the only one that doesn't provide info in responses;
also, the `produces` entry is filled in here by hand | 6259905694891a1f408ba1ab |
class Scene: <NEW_LINE> <INDENT> def __init__(self, description: str): <NEW_LINE> <INDENT> self.description = description <NEW_LINE> self.dialogue = [] <NEW_LINE> <DEDENT> def add_dialogue(self, description: str, *, author: str=None): <NEW_LINE> <INDENT> if author: <NEW_LINE> <INDENT> self.dialogue.append(Dialogue(author, description)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.dialogue.append(StageDirection(description)) <NEW_LINE> <DEDENT> <DEDENT> def add_direction(self, description: str): <NEW_LINE> <INDENT> self.add_dialogue(description) | The script's scene | 625990564e4d562566373971 |
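A hedged sketch of driving the `Scene` class above; `Dialogue` and `StageDirection` are not shown in this row, so minimal stand-ins are assumed here:

```python
# Hypothetical stand-ins for the classes the row references but doesn't define.
class Dialogue:
    def __init__(self, author, text):
        self.author, self.text = author, text

class StageDirection:
    def __init__(self, text):
        self.text = text

scene = Scene("A castle at night")
scene.add_dialogue("To be, or not to be", author="Hamlet")  # -> Dialogue
scene.add_direction("Exit, pursued by a bear")              # -> StageDirection
assert len(scene.dialogue) == 2
```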
class SelectAbspath_Field(Select_Field): <NEW_LINE> <INDENT> def get_links(self, links, resource, field_name, languages): <NEW_LINE> <INDENT> return get_abspath_links(self, links, resource, field_name, languages) <NEW_LINE> <DEDENT> def update_links(self, resource, field_name, source, target, languages, old_base, new_base): <NEW_LINE> <INDENT> update_abspath_links(self, resource, field_name, source, target, languages, old_base, new_base) <NEW_LINE> <DEDENT> def update_incoming_links(self, resource, field_name, source, languages): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def get_value_title(self, resource, name, language=None, mode=None): <NEW_LINE> <INDENT> abspath = self.get_value(resource, name, language=language) <NEW_LINE> if not abspath: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if self.multiple is False: <NEW_LINE> <INDENT> return resource.get_resource(abspath).get_title() <NEW_LINE> <DEDENT> titles = [] <NEW_LINE> for x in abspath: <NEW_LINE> <INDENT> r = resource.get_resource(x) <NEW_LINE> titles.append(r.get_title()) <NEW_LINE> <DEDENT> return ', '.join(titles) | Select_Field with values linking to resources abspath | 6259905699cbb53fe6832449 |
class DeclareQueuePublisher(Publisher): <NEW_LINE> <INDENT> def send(self, conn, msg, timeout=None): <NEW_LINE> <INDENT> queue = kombu.entity.Queue( channel=conn.channel, exchange=self.exchange, durable=self.durable, auto_delete=self.auto_delete, name=self.routing_key, routing_key=self.routing_key, queue_arguments=self.queue_arguments) <NEW_LINE> queue.declare() <NEW_LINE> super(DeclareQueuePublisher, self).send( conn, msg, timeout) | Publisher that declares a default queue
When the exchange is missing, instead of silently creating an exchange
that is not bound to any queue, this publisher creates a default queue
named after the routing_key.
This is mainly used to avoid missing notifications when nobody consumes
them yet. If a future consumer binds the default queue, it can retrieve
the missed messages. | 62599056596a897236129065 |
class Solution: <NEW_LINE> <INDENT> def twoSum2(self, nums, target): <NEW_LINE> <INDENT> nums.sort() <NEW_LINE> count, l, r = 0, 0, len(nums)-1 <NEW_LINE> while l < r: <NEW_LINE> <INDENT> cur_sum = nums[l] + nums[r] <NEW_LINE> if cur_sum > target: <NEW_LINE> <INDENT> count += r - l <NEW_LINE> r -= 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> l += 1 <NEW_LINE> <DEDENT> <DEDENT> return count | @param nums: an array of integer
@param target: An integer
@return: an integer
443. Two Sum II
Given an array of integers, find how many pairs of integers have a sum greater than a given target value.
Example
For numbers = [2, 7, 11, 15] and target = 24, return 1, because only 11 + 15 is greater than 24.
Challenge
Do it in O(1) extra space and O(n log n) time. | 6259905623849d37ff85262f |
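A short worked example of the two-pointer count above (a sketch; it assumes the `Solution` class from this row is in scope):

```python
sol = Solution()
# Sorted: [2, 7, 11, 15]; only (11, 15) sums above 24.
assert sol.twoSum2([2, 7, 11, 15], 24) == 1
# With target 8, every one of the 6 pairs exceeds it: when nums[l] + nums[r]
# beats the target, all pairs (l..r-1, r) do too, hence count += r - l.
assert sol.twoSum2([2, 7, 11, 15], 8) == 6
```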
class DeployTarget(object): <NEW_LINE> <INDENT> def __init__(self, host, username): <NEW_LINE> <INDENT> self._host = host <NEW_LINE> self._username = username <NEW_LINE> self._shell_client = ShellClientFactory.create(host, username) <NEW_LINE> <DEDENT> def deploy_binary(self, source_tar, dest_dir): <NEW_LINE> <INDENT> parent_dest_dir = os.path.dirname(dest_dir) <NEW_LINE> self._shell_client.exec_command('rm -rf {0}; mkdir -p {0}'.format(dest_dir), error_on_failure=True) <NEW_LINE> self._shell_client.copy(source_tar, '{}/clusterrunner.tgz'.format(parent_dest_dir), error_on_failure=True) <NEW_LINE> self._shell_client.exec_command( command='tar zxvf {}/clusterrunner.tgz -C {}'.format(parent_dest_dir, dest_dir), error_on_failure=True ) <NEW_LINE> <DEDENT> def deploy_conf(self, source_path, dest_path): <NEW_LINE> <INDENT> if not os.path.exists(source_path): <NEW_LINE> <INDENT> raise RuntimeError('Expected configuration file to exist in {}, but does not.'.format(source_path)) <NEW_LINE> <DEDENT> self._shell_client.copy(source_path, dest_path) <NEW_LINE> self._shell_client.exec_command('chmod 600 {}'.format(dest_path), error_on_failure=True) | A "deploy target" is the host to which clusterrunner will be deployed. Deployment entails putting
in place only the clusterrunner binaries and configuration. This class is not responsible for manipulating
processes or stopping/starting services. | 6259905621bff66bcd7241cf |
class PageLoaderTestCase(LoginEnrollmentTestCase): <NEW_LINE> <INDENT> def check_all_pages_load(self, course_id): <NEW_LINE> <INDENT> store = modulestore() <NEW_LINE> course = store.get_course(course_id) <NEW_LINE> self.enroll(course, True) <NEW_LINE> items = store.get_items(course_id) <NEW_LINE> if len(items) < 1: <NEW_LINE> <INDENT> self.fail('Could not retrieve any items from course') <NEW_LINE> <DEDENT> for descriptor in items: <NEW_LINE> <INDENT> if descriptor.location.category == 'about': <NEW_LINE> <INDENT> self._assert_loads('about_course', {'course_id': course_id.to_deprecated_string()}, descriptor) <NEW_LINE> <DEDENT> elif descriptor.location.category == 'static_tab': <NEW_LINE> <INDENT> kwargs = {'course_id': course_id.to_deprecated_string(), 'tab_slug': descriptor.location.name} <NEW_LINE> self._assert_loads('static_tab', kwargs, descriptor) <NEW_LINE> <DEDENT> elif descriptor.location.category == 'course_info': <NEW_LINE> <INDENT> self._assert_loads('info', {'course_id': course_id.to_deprecated_string()}, descriptor) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> kwargs = {'course_id': course_id.to_deprecated_string(), 'location': descriptor.location.to_deprecated_string()} <NEW_LINE> self._assert_loads('jump_to', kwargs, descriptor, expect_redirect=True, check_content=True) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _assert_loads(self, django_url, kwargs, descriptor, expect_redirect=False, check_content=False): <NEW_LINE> <INDENT> url = reverse(django_url, kwargs=kwargs) <NEW_LINE> response = self.client.get(url, follow=True) <NEW_LINE> if response.status_code != 200: <NEW_LINE> <INDENT> self.fail('Status %d for page %s' % (response.status_code, descriptor.location)) <NEW_LINE> <DEDENT> if expect_redirect: <NEW_LINE> <INDENT> self.assertEqual(response.redirect_chain[0][1], 302) <NEW_LINE> <DEDENT> if check_content: <NEW_LINE> <INDENT> self.assertNotContains(response, "this module is temporarily unavailable") <NEW_LINE> self.assertNotIsInstance(descriptor, ErrorDescriptor) | Base class that adds a function to load all pages in a modulestore. | 625990564e4d562566373972 |
class Bot: <NEW_LINE> <INDENT> def __init__(self, number_of_users, max_posts_per_user, max_like_per_user): <NEW_LINE> <INDENT> self.users_count = number_of_users <NEW_LINE> self.max_posts = max_posts_per_user <NEW_LINE> self.max_likes = max_like_per_user <NEW_LINE> <DEDENT> def user_signup(self): <NEW_LINE> <INDENT> url_method = url + 'user/signup/' <NEW_LINE> _password = GenerateUserData()._generate_password() <NEW_LINE> data = { 'email': GenerateUserData().generate_email(), 'username': GenerateUserData().generate_username(), 'password': _password } <NEW_LINE> resp = requests.post(url=url_method, data=data) <NEW_LINE> return resp.json().get('username'), _password <NEW_LINE> <DEDENT> def user_login(self, username, _password): <NEW_LINE> <INDENT> url_method = url + 'user/login/' <NEW_LINE> data = { 'username': username, 'password': _password } <NEW_LINE> resp = requests.post(url=url_method, data=data) <NEW_LINE> _token = resp.json().get('token') <NEW_LINE> return _token <NEW_LINE> <DEDENT> def create_post(self, _token): <NEW_LINE> <INDENT> url_method = url + 'post/create/' <NEW_LINE> data = {'text': GenerateUserData().generate_post_text()} <NEW_LINE> headers = {'Authorization': 'Token %s' % _token} <NEW_LINE> resp = requests.post(url=url_method, data=data, headers=headers) <NEW_LINE> post_id = resp.json().get('id') <NEW_LINE> return post_id <NEW_LINE> <DEDENT> def liked_post(self, _token, post_id): <NEW_LINE> <INDENT> url_method = url + 'post/%d/like' % post_id <NEW_LINE> headers = {'Authorization': 'Token %s' % _token} <NEW_LINE> resp = requests.get(url=url_method, headers=headers) <NEW_LINE> <DEDENT> def start_bot(self): <NEW_LINE> <INDENT> for _ in range(self.users_count): <NEW_LINE> <INDENT> username, _password = self.user_signup() <NEW_LINE> _token = self.user_login(username, _password) <NEW_LINE> count_of_posts = random.randint(1, self.max_posts) <NEW_LINE> count_of_likes = random.randint(1, self.max_likes) <NEW_LINE> all_posts = [] <NEW_LINE> while count_of_posts != 0: <NEW_LINE> <INDENT> post_id = self.create_post(_token) <NEW_LINE> all_posts.append(post_id) <NEW_LINE> count_of_posts -= 1 <NEW_LINE> <DEDENT> while count_of_likes != 0: <NEW_LINE> <INDENT> random_post = random.choice(all_posts) <NEW_LINE> self.liked_post(_token, random_post) <NEW_LINE> count_of_likes -= 1 | Automated bot for user registration, post creation, and post liking. | 625990568e71fb1e983bd034 |
class InvalidLogFormatException(Exception): <NEW_LINE> <INDENT> pass | Thrown when an invalid logformat is passed. | 625990563eb6a72ae038bbca |
class DbLinkedList(): <NEW_LINE> <INDENT> def __init__(self, value=None): <NEW_LINE> <INDENT> self.head = None <NEW_LINE> self.tail = None <NEW_LINE> self.length = 0 <NEW_LINE> if value: <NEW_LINE> <INDENT> self.push(value) <NEW_LINE> <DEDENT> <DEDENT> def push(self, value=None): <NEW_LINE> <INDENT> new_node = Node(value, nxt=self.head) <NEW_LINE> if self.length < 1: <NEW_LINE> <INDENT> self.tail = new_node <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.head.previous = new_node <NEW_LINE> <DEDENT> self.head = new_node <NEW_LINE> self.length += 1 <NEW_LINE> <DEDENT> def append(self, value): <NEW_LINE> <INDENT> new_node = Node(value, None, self.tail) <NEW_LINE> if self.length < 1: <NEW_LINE> <INDENT> self.head = new_node <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.tail.next = new_node <NEW_LINE> <DEDENT> self.tail = new_node <NEW_LINE> self.length += 1 <NEW_LINE> <DEDENT> def pop(self): <NEW_LINE> <INDENT> if self.head: <NEW_LINE> <INDENT> returned_value = self.head.value <NEW_LINE> self.head = self.head.next <NEW_LINE> if self.head: <NEW_LINE> <INDENT> self.head.previous = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.tail = None <NEW_LINE> <DEDENT> self.length -= 1 <NEW_LINE> return returned_value <NEW_LINE> <DEDENT> raise ValueError("Cannot pop from an empty list") <NEW_LINE> <DEDENT> def shift(self): <NEW_LINE> <INDENT> if self.head: <NEW_LINE> <INDENT> returned_value = self.tail.value <NEW_LINE> self.tail = self.tail.previous <NEW_LINE> if self.tail: <NEW_LINE> <INDENT> self.tail.next = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.head = None <NEW_LINE> <DEDENT> self.length -= 1 <NEW_LINE> return returned_value <NEW_LINE> <DEDENT> raise ValueError("Cannot shift from an empty list") <NEW_LINE> <DEDENT> def remove(self, value): <NEW_LINE> <INDENT> curr_node = self.head <NEW_LINE> if not self.length: <NEW_LINE> <INDENT> raise ValueError("Cannot remove from an empty list") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if curr_node.value == value: <NEW_LINE> <INDENT> self.pop() <NEW_LINE> return <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> while curr_node is not None: <NEW_LINE> <INDENT> if curr_node.value == value: <NEW_LINE> <INDENT> curr_node.previous.next = curr_node.next <NEW_LINE> if curr_node.next: <NEW_LINE> <INDENT> curr_node.next.previous = curr_node.previous <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.tail = curr_node.previous <NEW_LINE> <DEDENT> self.length -= 1 <NEW_LINE> print("{} was removed".format(value)) <NEW_LINE> return <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> curr_node = curr_node.next <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> raise ValueError("{} not in the list".format(value)) | Instantiate a doubly linked list. | 62599056fff4ab517ebced8f |
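A usage sketch for the list above (it assumes a `Node(value, nxt, previous)` class, which this row references but does not define):

```python
d = DbLinkedList(2)    # head = tail = 2
d.append(3)            # 2 <-> 3
d.push(1)              # 1 <-> 2 <-> 3
assert d.pop() == 1    # removes at the head
assert d.shift() == 3  # removes at the tail
assert d.length == 1   # only 2 remains
```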
class AccountIntrastatCode(models.Model): <NEW_LINE> <INDENT> _name = "account.intrastat.code" <NEW_LINE> _description = "Intrastat Code" <NEW_LINE> _translate = False <NEW_LINE> _order = "nckey" <NEW_LINE> name = fields.Char(string="Name") <NEW_LINE> nckey = fields.Char(string="NC Key") <NEW_LINE> code = fields.Char(string="NC Code", required=True) <NEW_LINE> description = fields.Char(string="Description") <NEW_LINE> suppl_unit_code = fields.Char("SupplUnitCode") <NEW_LINE> def name_get(self): <NEW_LINE> <INDENT> result = [] <NEW_LINE> for r in self: <NEW_LINE> <INDENT> text = r.name or r.description <NEW_LINE> result.append((r.id, text and "{} {}".format(r.code, text) or r.code)) <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> @api.model <NEW_LINE> def _name_search(self, name="", args=None, operator="ilike", limit=100): <NEW_LINE> <INDENT> if args is None: <NEW_LINE> <INDENT> args = [] <NEW_LINE> <DEDENT> domain = args + [ "|", "|", ("code", operator, name), ("name", operator, name), ("description", operator, name), ] <NEW_LINE> return super(AccountIntrastatCode, self).search(domain, limit=limit).name_get() <NEW_LINE> <DEDENT> _sql_constraints = [ ( "intrastat_region_nckey_unique", "UNIQUE (nckey)", "The NC key must be unique.", ), ] | Codes used for the intrastat reporting.
The list of commodity codes is available on:
http://www.intrastat.ro/doc/CN_2020.xml | 6259905632920d7e50bc75b2 |
class ParseStreamingManifestRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.MediaManifestContent = None <NEW_LINE> self.ManifestType = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.MediaManifestContent = params.get("MediaManifestContent") <NEW_LINE> self.ManifestType = params.get("ManifestType") <NEW_LINE> member_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in member_set: <NEW_LINE> <INDENT> member_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(member_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fields are useless." % ",".join(member_set)) | ParseStreamingManifest request parameter structure
| 625990563cc13d1c6d466cab |
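A sketch of the deserialization behaviour above, with a made-up payload; unknown keys trigger the warning path:

```python
req = ParseStreamingManifestRequest()
req._deserialize({"MediaManifestContent": "#EXTM3U ...", "Foo": 1})
assert req.ManifestType is None
# 'Foo' is not an attribute of the request, so a UserWarning
# "Foo fields are useless." is emitted via warnings.warn.
```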
class DirSchema(MetaParser): <NEW_LINE> <INDENT> schema = { 'dir': { 'dir_name': str, 'total_bytes': str, 'total_free_bytes': str, Optional('location'): str, Optional('files'): {Any(): {Optional('size'): str, Optional('date'): str, Optional('permission'): str, Optional('index'): str, Optional('time'): str} }, }, } | Schema for
* dir
* dir {directory}
* dir location {location}
* dir {directory} location {location} | 6259905615baa723494634fe |
class FTP(ftplib.FTP): <NEW_LINE> <INDENT> def __init__(self, host, user=None, passwd=None, netrcfile=None, logger=None): <NEW_LINE> <INDENT> auths = parse_netrc(netrcfile) <NEW_LINE> if host in auths: <NEW_LINE> <INDENT> if user is None: <NEW_LINE> <INDENT> user = auths[host]['login'] <NEW_LINE> <DEDENT> if passwd is None: <NEW_LINE> <INDENT> passwd = auths[host]['password'] <NEW_LINE> <DEDENT> <DEDENT> args = [] <NEW_LINE> if user is not None: <NEW_LINE> <INDENT> args.append(user) <NEW_LINE> if passwd is not None: <NEW_LINE> <INDENT> args.append(passwd) <NEW_LINE> <DEDENT> <DEDENT> ftplib.FTP.__init__(self, host) <NEW_LINE> self.login(*args) <NEW_LINE> self.ftp = self <NEW_LINE> self.logger = logger <NEW_LINE> if self.logger: <NEW_LINE> <INDENT> self.logger.info('Ska.ftp: log in to {} as {}'.format(host, user)) <NEW_LINE> <DEDENT> <DEDENT> def cd(self, dirname): <NEW_LINE> <INDENT> if self.logger: <NEW_LINE> <INDENT> self.logger.info('Ska.ftp: cd {}'.format(dirname)) <NEW_LINE> <DEDENT> self.cwd(dirname) <NEW_LINE> <DEDENT> def ls(self, dirname='', *args): <NEW_LINE> <INDENT> if self.logger: <NEW_LINE> <INDENT> self.logger.info('Ska.ftp: ls {} {}'.format(dirname, ' '.join(str(x) for x in args))) <NEW_LINE> <DEDENT> return self.nlst(dirname, *args) <NEW_LINE> <DEDENT> def ls_full(self, dirname='', *args): <NEW_LINE> <INDENT> if self.logger: <NEW_LINE> <INDENT> self.logger.info('Ska.ftp: ls {} {}'.format(dirname, ' '.join(str(x) for x in args))) <NEW_LINE> <DEDENT> return self.dir(dirname, *args) <NEW_LINE> <DEDENT> def put(self, localfile, remotefile=None): <NEW_LINE> <INDENT> if remotefile is None: <NEW_LINE> <INDENT> remotefile = os.path.basename(localfile) <NEW_LINE> <DEDENT> if self.logger: <NEW_LINE> <INDENT> self.logger.info('Ska.ftp: put {} as {}'.format(localfile, remotefile)) <NEW_LINE> <DEDENT> with contextlib.closing(open(localfile, 'rb')) as fh: <NEW_LINE> <INDENT> self.storbinary('STOR ' + remotefile, fh) <NEW_LINE> <DEDENT> <DEDENT> def get(self, remotefile, localfile=None): <NEW_LINE> <INDENT> if localfile is None: <NEW_LINE> <INDENT> localfile = os.path.basename(remotefile) <NEW_LINE> <DEDENT> if self.logger: <NEW_LINE> <INDENT> self.logger.info('Ska.ftp: get {} as {}'.format(remotefile, localfile)) <NEW_LINE> <DEDENT> with contextlib.closing(open(localfile, 'wb')) as fh: <NEW_LINE> <INDENT> self.retrbinary('RETR ' + remotefile, fh.write) | Initialize object for simpler ftp operations.
The FTP object has an attribute ``ftp`` which is the actual ftplib.FTP()
object. This can be used for operations not supported by this class.
:param host: ftp host name
:param user: user name (default=netrc value or anonymous)
:param passwd: password (default=netrc value or anonymous@ )
:param netrcfile: netrc file name (default=~/.netrc)
:param logger: logger object (e.g. pyyaks.logger.get_logger()) | 625990567cff6e4e811b6fae |
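A hedged usage sketch for the FTP wrapper above, assuming its module is importable; the host, credentials and paths are made up:

```python
ftp = FTP('ftp.example.com', user='alice', passwd='secret')
ftp.cd('/incoming')               # logged cwd()
print(ftp.ls())                   # logged nlst()
ftp.put('report.txt')             # upload as /incoming/report.txt
ftp.get('data.csv', '/tmp/data.csv')
ftp.quit()                        # plain ftplib.FTP methods remain available
```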
class SimBot(Bot): <NEW_LINE> <INDENT> def __init__(self, sim, name, owner): <NEW_LINE> <INDENT> super().__init__(name, owner) <NEW_LINE> self.sensors = [] <NEW_LINE> self.sim = sim <NEW_LINE> self.observer = Observer(self.notify) <NEW_LINE> self.agent.observable.attachObserver(self.observer) <NEW_LINE> <DEDENT> def addSensor(self, newSensor): <NEW_LINE> <INDENT> self.sensors.append(newSensor) <NEW_LINE> <DEDENT> def hasSensor(self, sensorType): <NEW_LINE> <INDENT> for s in self.sensors: <NEW_LINE> <INDENT> if s.sensorType == sensorType: <NEW_LINE> <INDENT> return s <NEW_LINE> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> def logStatusMessage(self, msg): <NEW_LINE> <INDENT> self.sim.addStatusMessage(msg) <NEW_LINE> <DEDENT> def notify(self, msg): <NEW_LINE> <INDENT> self.logStatusMessage(msg) <NEW_LINE> <DEDENT> def receiveStimuli(self, sensorName, stimuli): <NEW_LINE> <INDENT> terms = stimuli.split(' ') <NEW_LINE> if len(terms) < 1: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> for s in self.sensors: <NEW_LINE> <INDENT> if s.name.lower() == sensorName.lower(): <NEW_LINE> <INDENT> if s.sensorType.lower() == 'newton': <NEW_LINE> <INDENT> self.logStatusMessage(self.name + ': Sensing newtons.') <NEW_LINE> sData = s.receiveWeight(terms[0]) <NEW_LINE> self.agent.receiveSensoryData(sData) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def updateFromEnvironment(self): <NEW_LINE> <INDENT> envAudio = self.sim.getEnvAudio() <NEW_LINE> for s in self.sensors: <NEW_LINE> <INDENT> if s.sensorType.lower() == 'audio': <NEW_LINE> <INDENT> if len(envAudio) > 0: <NEW_LINE> <INDENT> self.logStatusMessage(self.name + ': Sensing audio.') <NEW_LINE> <DEDENT> for a in envAudio: <NEW_LINE> <INDENT> sData = s.receiveAudio(a) <NEW_LINE> self.agent.receiveSensoryData(sData) | Bot for Simulator | 625990560fa83653e46f6451 |
class ZConsError(RuntimeError): <NEW_LINE> <INDENT> pass | Base class for zcons exceptions.
| 625990568da39b475be04758 |
class MainHandler(tornado.web.RequestHandler): <NEW_LINE> <INDENT> def initialize(self, conn): <NEW_LINE> <INDENT> self.conn = conn <NEW_LINE> <DEDENT> @gen.coroutine <NEW_LINE> def delete(self): <NEW_LINE> <INDENT> c = self.conn.cursor() <NEW_LINE> c.execute('DELETE FROM logged_requests') <NEW_LINE> c.execute('DELETE FROM methods') <NEW_LINE> c.execute('DELETE FROM tracebacks') <NEW_LINE> c.execute('DELETE FROM responses') <NEW_LINE> c.execute('DELETE FROM domains') <NEW_LINE> self.conn.commit() <NEW_LINE> c.close() <NEW_LINE> <DEDENT> @gen.coroutine <NEW_LINE> def get(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> logged_requests = {} <NEW_LINE> analysis = { 'total_requests': 0, 'domains': set(), 'duration': 0 } <NEW_LINE> c = self.conn.cursor() <NEW_LINE> c.execute('SELECT * from logged_requests') <NEW_LINE> for row in c.fetchall(): <NEW_LINE> <INDENT> url, duration = row <NEW_LINE> if url not in logged_requests: <NEW_LINE> <INDENT> logged_requests[url] = { 'count': 0, 'methods': set(), 'tracebacks': set(), 'responses': set() } <NEW_LINE> <DEDENT> logged_requests[url]['count'] += 1 <NEW_LINE> analysis['total_requests'] += 1 <NEW_LINE> analysis['duration'] += duration <NEW_LINE> <DEDENT> c.execute('SELECT * from domains') <NEW_LINE> for row in c.fetchall(): <NEW_LINE> <INDENT> analysis['domains'].add(row[0]) <NEW_LINE> <DEDENT> c.execute('SELECT * from methods') <NEW_LINE> for row in c.fetchall(): <NEW_LINE> <INDENT> url, method = row <NEW_LINE> logged_requests[url]['methods'].add(method) <NEW_LINE> <DEDENT> c.execute('SELECT * from tracebacks') <NEW_LINE> for row in c.fetchall(): <NEW_LINE> <INDENT> url, traceback = row <NEW_LINE> logged_requests[url]['tracebacks'].add( tuple(json.loads(traceback)) ) <NEW_LINE> <DEDENT> c.execute('SELECT * from responses') <NEW_LINE> for row in c.fetchall(): <NEW_LINE> <INDENT> url, status_code, content = row <NEW_LINE> logged_requests[url]['responses'].add((status_code, content)) <NEW_LINE> <DEDENT> c.close() <NEW_LINE> self.write(json.dumps({ 'logged_requests': logged_requests, 'analysis': analysis }, cls=MonitorEncoder)) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print(e) <NEW_LINE> print(dir(e)) <NEW_LINE> <DEDENT> <DEDENT> @gen.coroutine <NEW_LINE> def post(self): <NEW_LINE> <INDENT> request_data = json_decode(self.request.body) <NEW_LINE> url = request_data.get('url') <NEW_LINE> c = self.conn.cursor() <NEW_LINE> c.execute( 'INSERT INTO logged_requests (url, duration) VALUES (?,?)', (url, request_data.get('duration')) ) <NEW_LINE> c.execute( 'INSERT INTO methods (url, method) VALUES (?,?)', (url, request_data.get('method')) ) <NEW_LINE> c.execute( 'INSERT INTO tracebacks (url, traceback) VALUES (?,?)', (url, json.dumps(request_data.get('traceback_list'))) ) <NEW_LINE> c.execute( 'INSERT INTO responses (url, status_code, content) VALUES (?,?,?)', ( url, request_data.get('response_status_code'), request_data.get('response_content') ) ) <NEW_LINE> c.execute( 'INSERT INTO domains (domain) VALUES (?)', (request_data.get('domain'),) ) <NEW_LINE> self.conn.commit() <NEW_LINE> c.close() | Tornado handler that records, summarizes and clears logged requests. | 6259905682261d6c52730980
class Generator(Base): <NEW_LINE> <INDENT> def __init__(self, text, cursor): <NEW_LINE> <INDENT> self.t = text <NEW_LINE> self.c = cursor <NEW_LINE> self.first = False <NEW_LINE> return <NEW_LINE> <DEDENT> def do_string(self, t): <NEW_LINE> <INDENT> self.t.insertString(self.c, t, False) <NEW_LINE> <DEDENT> def do_i(self, t): <NEW_LINE> <INDENT> self.c.CharPosture = ITALIC[0] <NEW_LINE> for s in t.children: self(s) <NEW_LINE> self.c.CharPosture = ITALIC[1] <NEW_LINE> <DEDENT> def do_b(self, t): <NEW_LINE> <INDENT> self.c.CharWeight = BOLD[0] <NEW_LINE> for s in t.children: self(s) <NEW_LINE> self.c.CharWeight = BOLD[1] <NEW_LINE> <DEDENT> def do_br(self, t): <NEW_LINE> <INDENT> self.t.insertString(self.c, u'\x0a', False) <NEW_LINE> <DEDENT> def begin_biblio(self): <NEW_LINE> <INDENT> self.first = True <NEW_LINE> <DEDENT> def begin_reference(self, key): <NEW_LINE> <INDENT> if self.first: <NEW_LINE> <INDENT> self.first = False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.t.insertString(self.c, u'\x0d', False) <NEW_LINE> <DEDENT> self.t.insertString(self.c, u'[%s]\xa0' % key, False) | Returns an object capable of transforming an abstract
representation of text into actual text in OpenOffice. | 6259905630dc7b76659a0d35 |
class AbstractDatabase(ABC): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def connect(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def run_stored_procedure(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def bulk_insert(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def read_to_dataframe(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def run_query(self): <NEW_LINE> <INDENT> pass | Abstract base class to serve as parent for different implementations of databases. | 6259905694891a1f408ba1ac |
class ProcessIntruder(ProcessPlayer): <NEW_LINE> <INDENT> def __init__(self, env, id, state, episode_count): <NEW_LINE> <INDENT> super(ProcessIntruder, self).__init__(env, id, state, episode_count) <NEW_LINE> self.role = 'intruder' <NEW_LINE> self.action_space = Config.INTRUDER_ACTION_SPACE <NEW_LINE> self.nb_actions = self.get_num_actions() <NEW_LINE> self.vmax = Config.INTRUDER_MAX_VELOCITY <NEW_LINE> self.model = NetworkHH(Config.DEVICE, self.role+str(self.id), self.action_space) <NEW_LINE> self.captured = Value('i', 0) <NEW_LINE> self.entered = Value('i', 0) <NEW_LINE> <DEDENT> def clearup_reward(self): <NEW_LINE> <INDENT> reward = 0 <NEW_LINE> if self.entered.value: <NEW_LINE> <INDENT> reward += Config.REWARD_ENTER <NEW_LINE> <DEDENT> reward -= self.env.world.target.contour(self.state.x, self.state.y)/self.env.world.max_target_level <NEW_LINE> if self.captured.value or self.entered.value: <NEW_LINE> <INDENT> self.state.done = 1 <NEW_LINE> <DEDENT> return reward <NEW_LINE> <DEDENT> def step(self, action): <NEW_LINE> <INDENT> reward = self.clearup_reward() <NEW_LINE> if not self.state.done: <NEW_LINE> <INDENT> new_x, new_y = self.try_step(action) <NEW_LINE> num_trial = 0 <NEW_LINE> while not self.env.world.is_in_world(new_x, new_y) and num_trial < 2 * self.nb_actions: <NEW_LINE> <INDENT> new_x, new_y = self.try_step(self.random_move()) <NEW_LINE> num_trial += 1 <NEW_LINE> <DEDENT> if num_trial < 2*self.nb_actions: <NEW_LINE> <INDENT> self.state.x = new_x <NEW_LINE> self.state.y = new_y <NEW_LINE> <DEDENT> for d in range(len(self.env.defenders)): <NEW_LINE> <INDENT> if self.env.defenders[d].is_captured(self.env.defenders[d], self): <NEW_LINE> <INDENT> self.captured.value = 1 <NEW_LINE> self.env.defenders[d].capture_buffer.value += 1 <NEW_LINE> <DEDENT> <DEDENT> if self.env.world.target.is_in_target(self.state.x, self.state.y): <NEW_LINE> <INDENT> self.entered.value = 1 <NEW_LINE> for d in range(len(self.env.defenders)): <NEW_LINE> <INDENT> self.env.defenders[d].enter_buffer.value += 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return reward | docstring for ProcessIntruder. | 6259905607d97122c4218217 |
class VariableSetListPrinter(object): <NEW_LINE> <INDENT> GLOBALSET = None <NEW_LINE> def __init__(self, val): <NEW_LINE> <INDENT> self.val = val <NEW_LINE> <DEDENT> def to_string(self): <NEW_LINE> <INDENT> return str(self.val.address) <NEW_LINE> <DEDENT> def children(self): <NEW_LINE> <INDENT> if VariableSetListPrinter.GLOBALSET is None: <NEW_LINE> <INDENT> block = gdb.lookup_global_symbol('init_hash_global_variable_set').symtab.static_block() <NEW_LINE> VariableSetListPrinter.GLOBALSET = gdb.lookup_symbol( 'global_variable_set', block)[0].value().address <NEW_LINE> <DEDENT> ptr = self.val.address <NEW_LINE> i = 0 <NEW_LINE> while not isNullptr(ptr): <NEW_LINE> <INDENT> nm = '[%d] ' % (i) <NEW_LINE> yield (nm, ptr['set']) <NEW_LINE> if int(ptr['set']) == int(VariableSetListPrinter.GLOBALSET): <NEW_LINE> <INDENT> yield (nm, "global_variable_set") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> yield (nm, str(ptr['set'].dereference())) <NEW_LINE> <DEDENT> i += 1 <NEW_LINE> ptr = ptr['next'] <NEW_LINE> <DEDENT> <DEDENT> def display_hint(self): <NEW_LINE> <INDENT> return 'map' | Print a variable_set_list. | 6259905676e4537e8c3f0af8 |
class CreateInvoiceForm(forms.ModelForm): <NEW_LINE> <INDENT> due_date = forms.DateTimeField(initial=datetime.now() + timedelta(days=30)) <NEW_LINE> create_date = forms.DateTimeField(initial=datetime.now()) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = Invoice <NEW_LINE> fields = ('title', 'description', "amount", "due_date", "member", "create_date") <NEW_LINE> <DEDENT> def clean(self): <NEW_LINE> <INDENT> cleaned_data = super(CreateInvoiceForm, self).clean() <NEW_LINE> return cleaned_data <NEW_LINE> <DEDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> club = kwargs.pop('club') <NEW_LINE> super().__init__(*args, **kwargs) <NEW_LINE> self.fields['member'].queryset = Member.objects.filter(club=club) | Form to create an invoice | 625990568e71fb1e983bd036 |
class NoCountPaginator(paginator.Paginator): <NEW_LINE> <INDENT> def get_count(self): <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> def page(self): <NEW_LINE> <INDENT> output = super(NoCountPaginator, self).page() <NEW_LINE> del output['meta']['total_count'] <NEW_LINE> return output | Paginator class that avoids a COUNT query and provides no total count. | 625990572ae34c7f260ac654 |
class TestCarnEating(object): <NEW_LINE> <INDENT> def setup(self): <NEW_LINE> <INDENT> self.copy_carn_kills_herb = Carnivore.carn_kills_herb <NEW_LINE> self.copy_params = Carnivore.params.copy() <NEW_LINE> self.carn = Carnivore(20, 13) <NEW_LINE> self.herbs = [Herbivore(20, 13), Herbivore(12)] <NEW_LINE> <DEDENT> def teardown(self): <NEW_LINE> <INDENT> Carnivore.carn_kills_herb = self.copy_carn_kills_herb <NEW_LINE> Carnivore.params = self.copy_params <NEW_LINE> <DEDENT> def test_carn_full(self): <NEW_LINE> <INDENT> self.carn.set_parameters({'F': 0}) <NEW_LINE> nt.assert_list_equal(self.herbs, self.carn.eating(self.herbs), "Returns wrong list of herbivores") <NEW_LINE> <DEDENT> def test_carn_fitness_low(self): <NEW_LINE> <INDENT> nt.assert_list_equal(self.herbs, Carnivore(250, 87).eating(self.herbs), "Returns wrong list of herbivores") <NEW_LINE> <DEDENT> def test_carn_kills(self): <NEW_LINE> <INDENT> Carnivore.carn_kills_herb = lambda _, __: True <NEW_LINE> herb_one = Herbivore(60, 45) <NEW_LINE> herb_two = Herbivore(12) <NEW_LINE> nt.assert_list_equal([herb_two], self.carn.eating([herb_one, herb_two]), "Returns wrong list of herbivores") <NEW_LINE> nt.assert_equal(57.5, self.carn.weight_of_animal(), "Weight gain is wrong") <NEW_LINE> <DEDENT> def test_carn_do_not_kill(self): <NEW_LINE> <INDENT> Carnivore.carn_kills_herb = lambda _, __: False <NEW_LINE> nt.assert_list_equal(self.herbs, self.carn.eating(self.herbs)) <NEW_LINE> nt.assert_equal(20, self.carn.weight_of_animal(), "The animal should not gain weight") <NEW_LINE> <DEDENT> def test_carn_kills_all(self): <NEW_LINE> <INDENT> Carnivore.carn_kills_herb = lambda _, __: True <NEW_LINE> nt.assert_list_equal([], self.carn.eating(self.herbs), "Should not return any surviving herbivores") <NEW_LINE> nt.assert_equal(44, self.carn.weight_of_animal(), "Weight gain is wrong") | Collects tests that allow us to override the method carn_kills_herb and change
default parameters. | 62599057379a373c97d9a591 |
class SalsahError(Exception): <NEW_LINE> <INDENT> pass | Error in getting data from SALSAH | 62599057e5267d203ee6ce5c |
class Detect(Thread): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> Thread.__init__(self) <NEW_LINE> self.capteur = 7 <NEW_LINE> GPIO.setmode(GPIO.BCM) <NEW_LINE> GPIO.setup(self.capteur, GPIO.IN) <NEW_LINE> time.sleep(2) <NEW_LINE> self.move = False <NEW_LINE> self.submit_mail = False <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> time.sleep(0.1) <NEW_LINE> if settings.ALARME: <NEW_LINE> <INDENT> if GPIO.input(self.capteur): <NEW_LINE> <INDENT> self.move = True <NEW_LINE> if not self.submit_mail: <NEW_LINE> <INDENT> subject = "ALARME: Détection de mouvement !" <NEW_LINE> message = "Il y a actuellement une activation de l'alarme\nURL de l'alarme:http://{}".format(settings.ALLOWED_HOSTS[0]) <NEW_LINE> email = MyUser.objects.all().values_list('email') <NEW_LINE> email_list = [e[0] for e in email] <NEW_LINE> send = Sendmail() <NEW_LINE> send.sendmail(email_list, subject, message) <NEW_LINE> self.submit_mail = True <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.move = False <NEW_LINE> self.submit_mail = False | Launches the detector. | 6259905710dbd63aa1c72130 |
class NumberRangeInput(GenericInput): <NEW_LINE> <INDENT> _sub_types = NumberInput._sub_types <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> var_attrs = ("main_caption", "r_max", "r_min", "shadow", "shadow_2", "user_can_add") <NEW_LINE> GenericInput.__init__(self,var_attrs) <NEW_LINE> self.__dict__["input_type"] = "number-range" <NEW_LINE> self.r_max = 1000000000 <NEW_LINE> self.r_min = 0 <NEW_LINE> <DEDENT> def check_response(self,proposed_response): <NEW_LINE> <INDENT> result = True <NEW_LINE> lo = float("-Infinity") <NEW_LINE> hi = float("Infinity") <NEW_LINE> if self.r_min: <NEW_LINE> <INDENT> lo = float(self.r_min) <NEW_LINE> <DEDENT> if self.r_max: <NEW_LINE> <INDENT> hi = float(self.r_max) <NEW_LINE> <DEDENT> entry_count = len(proposed_response) <NEW_LINE> if entry_count < 1: <NEW_LINE> <INDENT> result = False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if self.user_can_add == False and entry_count > 1: <NEW_LINE> <INDENT> result = False <NEW_LINE> <DEDENT> <DEDENT> if result: <NEW_LINE> <INDENT> for (a,b) in proposed_response: <NEW_LINE> <INDENT> if lo <= a <= b <= hi: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result = False <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def format_response(self,raw_response): <NEW_LINE> <INDENT> number_pattern = r"[\d.]+[\s,]+[\d.]+" <NEW_LINE> adj_response = re.findall(number_pattern,raw_response) <NEW_LINE> result = [] <NEW_LINE> for pair_of_strings in adj_response: <NEW_LINE> <INDENT> (a,b) = pair_of_strings.split(",") <NEW_LINE> pair_of_decimals = [float(a), float(b)] <NEW_LINE> result.append(pair_of_decimals) <NEW_LINE> <DEDENT> return result | The NumberRangeInput defines an input element where a user must specify a
low and a high value.
**A NumberRangeInput response is a list of [low, high] pairs of numerical values**
The NumberRangeInput class descends from GenericInput and tightens the
``_var_attrs`` whitelist to the following attributes:
-- main_caption
-- r_max
-- r_min
-- shadow
-- shadow_2
-- user_can_add
Class restricts modification of all other attributes.
Class also stipulates default values for min (zero) and max (one billion).
==================== ======================================================
Attribute Description
==================== ======================================================
DATA:
_sub_types same as Number-type inputs
input_type "date-range"
r_max 1,000,000,000 (one billion)
r_min 0 (zero)
FUNCTIONS:
check_response() method checks that each range is within min and max
format_response() method uses regular expressions to select range lists
==================== ====================================================== | 62599057009cb60464d02aa2 |
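A short sketch of the response round-trip above: `format_response` pulls number pairs out of raw text, and `check_response` validates them against r_min/r_max (assumes the class and its GenericInput base are importable):

```python
ri = NumberRangeInput()
ri.user_can_add = True                 # allow more than one range
parsed = ri.format_response("between 5, 10 and also 200, 300")
# parsed == [[5.0, 10.0], [200.0, 300.0]]
assert ri.check_response(parsed)
assert not ri.check_response([[10.0, 5.0]])   # low > high fails
```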
class AttnGRU: <NEW_LINE> <INDENT> def __init__(self, num_units, is_train, bn): <NEW_LINE> <INDENT> self.num_units = num_units <NEW_LINE> self.is_train = is_train <NEW_LINE> self.bn = bn <NEW_LINE> <DEDENT> def __call__(self, inputs, state, attention): <NEW_LINE> <INDENT> with tf.variable_scope('AttnGRU'): <NEW_LINE> <INDENT> r = fully_connected(tf.concat(1, [inputs, state]), self.num_units, 'AttnGRU_fc1', init_b=1.0, group_id=1) <NEW_LINE> r = batch_norm(r, 'AttnGRU_bn1', self.is_train, self.bn, 'sigmoid') <NEW_LINE> c = fully_connected(tf.concat(1, [inputs, r*state]), self.num_units, 'AttnGRU_fc2', init_b=0.0, group_id=1) <NEW_LINE> c = batch_norm(c, 'AttnGRU_bn2', self.is_train, self.bn, 'tanh') <NEW_LINE> new_state = attention * c + (1 - attention) * state <NEW_LINE> <DEDENT> return new_state | Attention-based GRU (used by the Episodic Memory Module). | 62599057f7d966606f74936f |
class WinLoginsGraph(BaseGraphPlugin): <NEW_LINE> <INDENT> NAME = 'WinLogins' <NEW_LINE> DISPLAY_NAME = 'Windows logins' <NEW_LINE> def generate(self): <NEW_LINE> <INDENT> query = 'tag:logon-event' <NEW_LINE> return_fields = [ 'computer_name', 'username', 'logon_type', 'logon_process' ] <NEW_LINE> events = self.event_stream( query_string=query, return_fields=return_fields) <NEW_LINE> for event in events: <NEW_LINE> <INDENT> computer_name = event['_source'].get('computer_name') <NEW_LINE> username = event['_source'].get('username') <NEW_LINE> logon_type = event['_source'].get('logon_type') <NEW_LINE> computer = self.graph.add_node(computer_name, {'type': 'computer'}) <NEW_LINE> user = self.graph.add_node(username, {'type': 'user'}) <NEW_LINE> self.graph.add_edge(user, computer, logon_type, event) <NEW_LINE> <DEDENT> self.graph.commit() <NEW_LINE> return self.graph | Graph plugin for Windows logins. | 62599057baa26c4b54d50813 |
class PurchaseReport(ReportMixin): <NEW_LINE> <INDENT> __name__ = 'report.purchase' <NEW_LINE> @classmethod <NEW_LINE> def get_context(cls, records, data): <NEW_LINE> <INDENT> Purchase = Pool().get('purchase.purchase') <NEW_LINE> Party = Pool().get('party.party') <NEW_LINE> Product = Pool().get('product.product') <NEW_LINE> report_context = super(PurchaseReport, cls).get_context(records, data) <NEW_LINE> domain = [ ('state', 'in', ['confirmed', 'processing', 'done']), ('purchase_date', '>=', data['start_date']), ('purchase_date', '<=', data['end_date']) ] <NEW_LINE> supplier_id = data.get('supplier') <NEW_LINE> product_id = data.get('product') <NEW_LINE> if supplier_id: <NEW_LINE> <INDENT> domain.append(('party', '=', supplier_id)) <NEW_LINE> <DEDENT> if product_id: <NEW_LINE> <INDENT> domain.append(('lines.product', '=', product_id)) <NEW_LINE> <DEDENT> purchases = Purchase.search(domain) <NEW_LINE> report_context.update({ 'purchases': purchases, 'supplier': supplier_id and Party(supplier_id), 'product': product_id and Product(product_id), }) <NEW_LINE> return report_context | Purchase Report | 62599057596a897236129067 |
class GenerationTransformer(tf.keras.models.Model): <NEW_LINE> <INDENT> def __init__(self, vocab_size, num_layers, model_dims, attention_depth, num_heads, hidden_dims): <NEW_LINE> <INDENT> super(GenerationTransformer, self).__init__() <NEW_LINE> self.embedding_layer = WordAndPositionalEmbedding(model_dims, vocab_size) <NEW_LINE> self.main_layers = [TransformerLayer(model_dims, attention_depth, num_heads, hidden_dims, True, False) for _ in range(num_layers)] <NEW_LINE> self.final_layer = tf.keras.layers.Dense(vocab_size, activation='softmax') <NEW_LINE> <DEDENT> def call(self, inputs, training): <NEW_LINE> <INDENT> values = self.embedding_layer(inputs) <NEW_LINE> for layer in self.main_layers: <NEW_LINE> <INDENT> values = layer(values, training=training) <NEW_LINE> <DEDENT> return self.final_layer(values) | Transformer language model | 6259905723849d37ff852633 |
class WhoisDk(WhoisEntry): <NEW_LINE> <INDENT> regex = { 'domain_name': r'Domain: *(.+)', 'creation_date': r'Registered: *(.+)', 'expiration_date': r'Expires: *(.+)', 'dnssec': r'Dnssec: *(.+)', 'status': r'Status: *(.+)', 'registrant_handle': r'Registrant\s*(?:.*\n){1}\s*Handle: *(.+)', 'registrant_name': r'Registrant\s*(?:.*\n){2}\s*Name: *(.+)', 'registrant_address': r'Registrant\s*(?:.*\n){3}\s*Address: *(.+)', 'registrant_zip_code': r'Registrant\s*(?:.*\n){4}\s*Postalcode: *(.+)', 'registrant_city': r'Registrant\s*(?:.*\n){5}\s*City: *(.+)', 'registrant_country': r'Registrant\s*(?:.*\n){6}\s*Country: *(.+)', 'name_servers': r'Nameservers\n *([\n\S\s]+)' } <NEW_LINE> def __init__(self, domain, text): <NEW_LINE> <INDENT> if 'No match for ' in text: <NEW_LINE> <INDENT> raise PywhoisError(text) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> WhoisEntry.__init__(self, domain, text, self.regex) <NEW_LINE> <DEDENT> <DEDENT> def _preprocess(self, attr, value): <NEW_LINE> <INDENT> if attr == 'name_servers': <NEW_LINE> <INDENT> return [ line.split(":")[-1].strip() for line in value.split("\n") if line.startswith("Hostname") ] <NEW_LINE> <DEDENT> return super(WhoisDk, self)._preprocess(attr, value) | Whois parser for .dk domains
| 62599057b5575c28eb713783 |
class Clause(object): <NEW_LINE> <INDENT> def __init__(self, conjunction=True, exprs=None): <NEW_LINE> <INDENT> self._and = conjunction <NEW_LINE> self._exprs = [] if exprs is None else exprs <NEW_LINE> <DEDENT> def is_and(self): <NEW_LINE> <INDENT> return self._and <NEW_LINE> <DEDENT> def add(self, expr): <NEW_LINE> <INDENT> self._exprs.append(expr) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self._exprs) | Set of expressions connected by AND or OR.
| 6259905721bff66bcd7241d3 |
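A small sketch of composing a WHERE-style filter with the Clause class above:

```python
where = Clause(conjunction=True)     # expressions joined by AND
where.add("age >= 18")
where.add("country = 'NO'")
joiner = " AND " if where.is_and() else " OR "
print(joiner.join(where))            # -> age >= 18 AND country = 'NO'
```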
class TestBudgetDetail(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testBudgetDetail(self): <NEW_LINE> <INDENT> pass | BudgetDetail unit test stubs | 6259905701c39578d7f141ee |
class SentenceModel(metaclass=ABCMeta): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.session = tf.Session() <NEW_LINE> self.graph = tf.get_default_graph() <NEW_LINE> <DEDENT> @property <NEW_LINE> @abstractmethod <NEW_LINE> def model(self) -> tf.keras.models.Model: <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def compile_model(self, optimizer): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def predict_txt(self, txt: AnyStr, batch_size: int, input_mapper: 'InputMapper', tokens: Optional[List[Token]] = None, include_tokens: bool = True) -> List['Sentence']: <NEW_LINE> <INDENT> if tokens is None: <NEW_LINE> <INDENT> tokens = list(tokenize(txt)) <NEW_LINE> <DEDENT> if len(tokens) == 0: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> inputs, _, weights = input_mapper.map_input([(txt, tokens)], include_labels=False) <NEW_LINE> with self.graph.as_default(), self.session.as_default(): <NEW_LINE> <INDENT> outputs, _ = self.model.predict(inputs, batch_size=batch_size) <NEW_LINE> <DEDENT> outputs = np.rint(outputs) <NEW_LINE> not_padding = np.nonzero(weights) <NEW_LINE> outputs = ['B' if x == 1 else 'I' for x in outputs[not_padding]] <NEW_LINE> results = [] <NEW_LINE> prev = None <NEW_LINE> sentence = None <NEW_LINE> for token, label in zip(tokens, outputs): <NEW_LINE> <INDENT> if (label == 'B') or (label == 'O' and (prev is None or prev[1] != 'O')): <NEW_LINE> <INDENT> if sentence is not None: <NEW_LINE> <INDENT> sentence.end_index = prev[0].end_index <NEW_LINE> results.append(sentence) <NEW_LINE> <DEDENT> sentence = Sentence( start_index=token.start_index, end_index=-1, category='S' if label == 'B' else 'U', tokens=None ) <NEW_LINE> if include_tokens: <NEW_LINE> <INDENT> sentence.tokens = [] <NEW_LINE> <DEDENT> <DEDENT> if include_tokens: <NEW_LINE> <INDENT> sentence.tokens.append(token) <NEW_LINE> <DEDENT> prev = token, label <NEW_LINE> <DEDENT> if sentence is not None: <NEW_LINE> <INDENT> sentence.end_index = prev[0].end_index <NEW_LINE> results.append(sentence) <NEW_LINE> <DEDENT> return results | Base class for a sentence model
Attributes
----------
session: tf.Session
The tensorflow session.
graph: tf.Graph
The tensorflow graph. | 625990578e7ae83300eea5fc |
class Logger(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def create_logger(self): <NEW_LINE> <INDENT> logger = logging.getLogger(__name__) <NEW_LINE> logger.setLevel(logging.INFO) <NEW_LINE> ch = logging.StreamHandler() <NEW_LINE> ch.setLevel(logging.INFO) <NEW_LINE> formatter_f = logging.Formatter( '%(asctime)s - %(name)s - %(levelname)s - %(message)s') <NEW_LINE> formatter_c = logging.Formatter('%(levelname)s - %(message)s') <NEW_LINE> ch.setFormatter(formatter_c) <NEW_LINE> logger.addHandler(ch) <NEW_LINE> return logger | Logger object to use in functions
| 62599057d53ae8145f9199d1 |
class Attention(Layer): <NEW_LINE> <INDENT> def __init__(self, attention_dim, **kwargs): <NEW_LINE> <INDENT> self.attention_dim = attention_dim <NEW_LINE> super(Attention, self).__init__(**kwargs) <NEW_LINE> <DEDENT> def build(self, input_shape): <NEW_LINE> <INDENT> self.hidden_dim = input_shape[2] <NEW_LINE> self.W = self.add_weight(shape=(self.hidden_dim, self.attention_dim), name='att_W', initializer='glorot_uniform', trainable=True) <NEW_LINE> self.b = self.add_weight(shape=(self.attention_dim,), name='att_b', initializer='zeros', trainable=True) <NEW_LINE> self.u = self.add_weight(shape=(self.attention_dim,), name='att_u', initializer='glorot_uniform', trainable=True) <NEW_LINE> super(Attention, self).build(input_shape) <NEW_LINE> <DEDENT> def compute_output_shape(self, input_shape): <NEW_LINE> <INDENT> return (input_shape[0], self.hidden_dim) <NEW_LINE> <DEDENT> def call(self, x, mask=None): <NEW_LINE> <INDENT> a = K.tanh(K.dot(x, self.W) + self.b) <NEW_LINE> mul_a_u = a * self.u <NEW_LINE> dot_a_u = K.sum(mul_a_u, axis=2) <NEW_LINE> alpha_num = K.exp(dot_a_u) <NEW_LINE> alpha_den = K.sum(alpha_num, axis=1) <NEW_LINE> alpha_den = K.expand_dims(alpha_den) <NEW_LINE> alpha = alpha_num / alpha_den <NEW_LINE> alpha = K.expand_dims(alpha) <NEW_LINE> weighted_input = x * alpha <NEW_LINE> return K.sum(weighted_input, axis=1) | Attention operation for temporal data.
# Input shape
3D tensor with shape: `(samples, steps, features)`.
# Output shape
2D tensor with shape: `(samples, features)`. | 625990574a966d76dd5f0460 |
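A hedged sketch of placing the Attention layer above on top of a recurrent encoder; it collapses (samples, steps, features) to (samples, features). The import paths are assumptions, since the row does not show them:

```python
# Assumed imports; the row only shows the Layer subclass itself.
from tensorflow.keras.layers import Input, LSTM
from tensorflow.keras.models import Model

inp = Input(shape=(20, 128))                 # 20 steps, 128 features
seq = LSTM(64, return_sequences=True)(inp)   # (batch, 20, 64)
ctx = Attention(attention_dim=32)(seq)       # weighted sum -> (batch, 64)
model = Model(inp, ctx)
```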
class service(object): <NEW_LINE> <INDENT> def __init__(self, serviceName, servicePort, serviceIP=""): <NEW_LINE> <INDENT> self.serviceName = serviceName <NEW_LINE> self.servicePort = servicePort <NEW_LINE> self.serviceIP = serviceIP <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "%s %s:%s" % (self.serviceName, str(self.serviceIP), self.servicePort) | Stores variables for a service | 62599057097d151d1a2c25da |
class FengHuangSpider(RedisCrawlSpider): <NEW_LINE> <INDENT> name = "fenghuang" <NEW_LINE> redis_key = settings.caijing_start_urls <NEW_LINE> allowed_domains = ['news.ifeng.com'] <NEW_LINE> rules = (Rule( LinkExtractor( allow=r"news.ifeng.com/a/\d{8}/\d*",), callback="parse_article", follow=True), ) <NEW_LINE> def parse_article(self, response): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> article_id = re.search(r'\d+', response.url).group() <NEW_LINE> title = response.xpath(ls.FENGHUANG_TITLE).extract_first("") <NEW_LINE> content = ''.join(response.xpath(ls.FENGHUANG_CONTENT).extract()) <NEW_LINE> item = ArticleItem() <NEW_LINE> item['articleId'] = article_id <NEW_LINE> item['articleTitle'] = title <NEW_LINE> item['articleContent'] = content <NEW_LINE> yield item <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.logger.info('item in article failed') | Spider for 凤凰网 (ifeng.com) news articles | 62599057460517430c432b09 |
class AllAppsAllLogsSummaryConverter(AllLogsConverter): <NEW_LINE> <INDENT> all_in_one_gvc_tljs_log_path = "/data/mixs_logs/%s/apps/*/%s/%s" % (AllLogsConverter.json_log_dir, AllLogsConverter.all_in_one_log_dir, AllLogsConverter.all_in_one_gvc_tljs_file) <NEW_LINE> apps_stats_path = "%s/%s" % (AllLogsConverter.stats_log_path, AllLogsConverter.app_stats) <NEW_LINE> def __init__(self, json_apps_log_path = AllLogsConverter.json_apps_log_path): <NEW_LINE> <INDENT> super(AllAppsAllLogsSummaryConverter, self).__init__() <NEW_LINE> logger = logging.getLogger("AllAppsAllLogsSummaryConverter") <NEW_LINE> logger.setLevel(logging.INFO) <NEW_LINE> logging.basicConfig() <NEW_LINE> self.logger = logger <NEW_LINE> logger.info("init starts") <NEW_LINE> logger.info("init finished") <NEW_LINE> pass <NEW_LINE> <DEDENT> def convert(self, ): <NEW_LINE> <INDENT> self.logger.info("convert starts") <NEW_LINE> st = time.time() <NEW_LINE> app_stats = json.loads("{}") <NEW_LINE> for fin in glob.glob(self.all_in_one_gvc_tljs_log_path): <NEW_LINE> <INDENT> app_summary_log = json.load(open(fin, "r")) <NEW_LINE> appname = app_summary_log[self.APP_NAME] <NEW_LINE> app_stats[appname] = json.loads("{}") <NEW_LINE> self._init_app_stats_log(app_stats[appname]) <NEW_LINE> log_info = app_summary_log[self.LOG_INFO] <NEW_LINE> for t in log_info: <NEW_LINE> <INDENT> app_stats[appname][self.BASE_INFO] += 1 if self.BASE_INFO in log_info[t] else 0 <NEW_LINE> app_stats[appname][self.LOCATION] += 1 if self.LOCATION in log_info[t] else 0 <NEW_LINE> stats = log_info[t][self.STATS] <NEW_LINE> for info in stats: <NEW_LINE> <INDENT> app_stats[appname][info] += stats[info] <NEW_LINE> pass <NEW_LINE> <DEDENT> pass <NEW_LINE> <DEDENT> pass <NEW_LINE> <DEDENT> fpout = open(self.apps_stats_path, "w") <NEW_LINE> fpout.write(json.dumps(app_stats)) <NEW_LINE> fpout.close() <NEW_LINE> et = time.time() <NEW_LINE> self.logger.info("%f [s]" % (et - st)) <NEW_LINE> self.logger.info("convert finished") <NEW_LINE> pass <NEW_LINE> <DEDENT> def _init_app_stats_log(self, app_stats): <NEW_LINE> <INDENT> app_stats[self.BASE_INFO] = 0 <NEW_LINE> app_stats[self.LOCATION] = 0 <NEW_LINE> app_stats[self.WIFI_APS] = 0 <NEW_LINE> app_stats[self.WIFI_CONNECTED_AP] = 0 <NEW_LINE> app_stats[self.SENSOR] = 0 <NEW_LINE> app_stats[self.BLUETOOTH] = 0 <NEW_LINE> app_stats[self.BATTERY] = 0 <NEW_LINE> app_stats[self.HEADSET_PLUG] = 0 <NEW_LINE> app_stats[self.NETWORK_INFO] = 0 <NEW_LINE> app_stats[self.TELEPHONY] = 0 <NEW_LINE> app_stats[self.NEIGHBORING_CELL_INFO] = 0 <NEW_LINE> pass | Convert each app's all_in_one JSON summary log into one consolidated stats file. | 625990577047854f4634092f |
class InferenceWrapperBase(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def build_model(self, model_config): <NEW_LINE> <INDENT> tf.compat.v1.logging.fatal("Please implement build_model in subclass") <NEW_LINE> <DEDENT> def _create_restore_fn(self, checkpoint_path, saver): <NEW_LINE> <INDENT> if tf.compat.v1.gfile.IsDirectory(checkpoint_path): <NEW_LINE> <INDENT> checkpoint_path = tf.train.latest_checkpoint(checkpoint_path) <NEW_LINE> if not checkpoint_path: <NEW_LINE> <INDENT> raise ValueError("No checkpoint file found in: %s" % checkpoint_path) <NEW_LINE> <DEDENT> <DEDENT> def _restore_fn(sess): <NEW_LINE> <INDENT> tf.compat.v1.logging.info("Loading model from checkpoint: %s", checkpoint_path) <NEW_LINE> saver.restore(sess, checkpoint_path) <NEW_LINE> tf.compat.v1.logging.info("Successfully loaded checkpoint: %s", os.path.basename(checkpoint_path)) <NEW_LINE> <DEDENT> return _restore_fn <NEW_LINE> <DEDENT> def build_graph_from_config(self, model_config, checkpoint_path): <NEW_LINE> <INDENT> tf.compat.v1.logging.info("Building model.") <NEW_LINE> self.build_model(model_config) <NEW_LINE> saver = tf.compat.v1.train.Saver() <NEW_LINE> return self._create_restore_fn(checkpoint_path, saver) <NEW_LINE> <DEDENT> def build_graph_from_proto(self, graph_def_file, saver_def_file, checkpoint_path): <NEW_LINE> <INDENT> tf.compat.v1.logging.info("Loading GraphDef from file: %s", graph_def_file) <NEW_LINE> graph_def = tf.GraphDef() <NEW_LINE> with tf.compat.v1.gfile.FastGFile(graph_def_file, "rb") as f: <NEW_LINE> <INDENT> graph_def.ParseFromString(f.read()) <NEW_LINE> <DEDENT> tf.import_graph_def(graph_def, name="") <NEW_LINE> tf.compat.v1.logging.info("Loading SaverDef from file: %s", saver_def_file) <NEW_LINE> saver_def = tf.compat.v1.train.SaverDef() <NEW_LINE> with tf.compat.v1.gfile.FastGFile(saver_def_file, "rb") as f: <NEW_LINE> <INDENT> saver_def.ParseFromString(f.read()) <NEW_LINE> <DEDENT> saver = tf.compat.v1.train.Saver(saver_def=saver_def) <NEW_LINE> return self._create_restore_fn(checkpoint_path, saver) <NEW_LINE> <DEDENT> def feed_image(self, sess, encoded_image): <NEW_LINE> <INDENT> tf.compat.v1.logging.fatal("Please implement feed_image in subclass") <NEW_LINE> <DEDENT> def inference_step(self, sess, input_feed, state_feed): <NEW_LINE> <INDENT> tf.compat.v1.logging.fatal("Please implement inference_step in subclass") | Base wrapper class for performing inference with an image-to-text model. | 625990578e7ae83300eea5fd |
class FashionMNIST(AbstractDomainInterface): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(FashionMNIST, self).__init__() <NEW_LINE> im_transformer = transforms.Compose([transforms.ToTensor()]) <NEW_LINE> root_path = './workspace/datasets/fmnist' <NEW_LINE> self.D1_train_ind = torch.arange(0, 50000).int() <NEW_LINE> self.D1_valid_ind = torch.arange(50000, 60000).int() <NEW_LINE> self.D1_test_ind = torch.arange(0, 10000).int() <NEW_LINE> self.D2_valid_ind = torch.arange(0, 60000).int() <NEW_LINE> self.D2_test_ind = torch.arange(0, 10000).int() <NEW_LINE> self.ds_train = datasets.FashionMNIST(root_path, train=True, transform=im_transformer, download=True) <NEW_LINE> self.ds_test = datasets.FashionMNIST(root_path, train=False, transform=im_transformer, download=True) <NEW_LINE> <DEDENT> def get_D1_train(self): <NEW_LINE> <INDENT> return SubDataset(self.name, self.ds_train, self.D1_train_ind) <NEW_LINE> <DEDENT> def get_D1_valid(self): <NEW_LINE> <INDENT> return SubDataset(self.name, self.ds_train, self.D1_valid_ind, label=0) <NEW_LINE> <DEDENT> def get_D1_test(self): <NEW_LINE> <INDENT> return SubDataset(self.name, self.ds_test, self.D1_test_ind, label=0) <NEW_LINE> <DEDENT> def get_D2_valid(self, D1): <NEW_LINE> <INDENT> assert self.is_compatible(D1) <NEW_LINE> return SubDataset(self.name, self.ds_train, self.D2_valid_ind, label=1, transform=D1.conformity_transform()) <NEW_LINE> <DEDENT> def get_D2_test(self, D1): <NEW_LINE> <INDENT> assert self.is_compatible(D1) <NEW_LINE> return SubDataset(self.name, self.ds_test, self.D2_test_ind, label=1, transform=D1.conformity_transform()) <NEW_LINE> <DEDENT> def conformity_transform(self): <NEW_LINE> <INDENT> return transforms.Compose([transforms.ToPILImage(), transforms.Resize((28, 28)), transforms.Grayscale(), transforms.ToTensor() ]) | FashionMNIST: 60,000 train + 10,000 test.
D1: (50,000 train + 10,000 valid) + (10,000 test)
D2: 60,000 valid + 10,000 test. | 62599057462c4b4f79dbcf75 |
class CreateLicenseeView(LoginRequiredMixin, SuccessMessageMixin, CreateView): <NEW_LINE> <INDENT> model = Licensee <NEW_LINE> fields = ['first_name', 'last_name', 'designation', 'organization_name', 'mobile', 'email', 'status'] <NEW_LINE> template_name = 'licensee_form.html' <NEW_LINE> success_message = "%(first_name)s was created successfully" <NEW_LINE> success_url = reverse_lazy('list_licensees') <NEW_LINE> def post(self, request, *args, **kwargs): <NEW_LINE> <INDENT> form = self.get_form() <NEW_LINE> if form.is_valid(): <NEW_LINE> <INDENT> licensee = form.save() <NEW_LINE> line1 = request.POST['line1'] <NEW_LINE> line2 = request.POST['line2'] <NEW_LINE> city = request.POST['location'] <NEW_LINE> country = request.POST['country'] <NEW_LINE> state = request.POST['state'] <NEW_LINE> zip_code = request.POST['zip'] <NEW_LINE> if city and country and state and zip_code: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> licensee.address = Address.objects.create(line1=line1, line2=line2, city_or_village=city, state=state, country=country,zip_code=int(zip_code)) <NEW_LINE> licensee.save() <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logger.error("{}, error occured while saving address of a licensee.".format(e)) <NEW_LINE> messages.error(request, "Error occured while saving address of a licensee.") <NEW_LINE> return redirect('add_licensee') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> logger.error(form.errors) <NEW_LINE> messages.error(request, form.errors) <NEW_LINE> return redirect('add_licensee') <NEW_LINE> <DEDENT> messages.success(request, "{}, licensee created successfully.".format(licensee.full_name)) <NEW_LINE> return HttpResponseRedirect(reverse('list_licensees')) | Create new licensee | 6259905710dbd63aa1c72131 |
class Host(): <NEW_LINE> <INDENT> def __init__(self, ip_address, mac_address): <NEW_LINE> <INDENT> self.ip_address = ip_address <NEW_LINE> self.mac_address = mac_address <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "%s%s" % (self.ip_address, self.mac_address) | Stores a host's IP address and MAC address as strings. | 6259905738b623060ffaa306 |
class Crash(angr.SimProcedure): <NEW_LINE> <INDENT> def run(self): <NEW_LINE> <INDENT> DeepAngr(procedure=self).api_crash() | Implements DeepState_Crash, which notifies us of a crashing test. | 6259905763d6d428bbee3d3f |
class Amenity(BaseModel): <NEW_LINE> <INDENT> name = "" <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(*args, **kwargs) | class Amenity that inherits from BaseModel
public class attribute:
name: string - empty string | 6259905707d97122c421821a |
class EdisonArduinoGpioIn(GpioIn, _EdisonArduinoGpioBase): <NEW_LINE> <INDENT> def __init__(self, pin_config): <NEW_LINE> <INDENT> super().__init__(pin_config) <NEW_LINE> gpio.configure_out(self._out_pin) <NEW_LINE> gpio.configure_in(self._pullup_pin) <NEW_LINE> <DEDENT> @property <NEW_LINE> def pullup(self): <NEW_LINE> <INDENT> return gpio.get_direction(self._pullup_pin) == gpio.DIRECTION_OUT and gpio.get(self._pullup_pin) <NEW_LINE> <DEDENT> @pullup.setter <NEW_LINE> def pullup(self, value): <NEW_LINE> <INDENT> if value: <NEW_LINE> <INDENT> gpio.configure_out(self._pullup_pin, 1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> gpio.configure_in(self._pullup_pin) | Intel Edison Arduino board GPIO input driver.
Provides automated muxing. | 6259905799cbb53fe683244f |
class VirtKey(object): <NEW_LINE> <INDENT> def __init__(self, hyper, id_, opts): <NEW_LINE> <INDENT> self.opts = opts <NEW_LINE> self.hyper = hyper <NEW_LINE> self.id = id_ <NEW_LINE> path = os.path.join(self.opts["pki_dir"], "virtkeys", hyper) <NEW_LINE> if not os.path.isdir(path): <NEW_LINE> <INDENT> os.makedirs(path) <NEW_LINE> <DEDENT> self.path = os.path.join(path, id_) <NEW_LINE> <DEDENT> def accept(self, pub): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> with salt.utils.files.fopen(self.path, "r") as fp_: <NEW_LINE> <INDENT> expiry = int(fp_.read()) <NEW_LINE> <DEDENT> <DEDENT> except (OSError, IOError): <NEW_LINE> <INDENT> log.error( "Request to sign key for minion '%s' on hyper '%s' " "denied: no authorization", self.id, self.hyper, ) <NEW_LINE> return False <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> log.error("Invalid expiry data in %s", self.path) <NEW_LINE> return False <NEW_LINE> <DEDENT> if (time.time() - expiry) > 600: <NEW_LINE> <INDENT> log.warning( 'Request to sign key for minion "%s" on hyper "%s" denied: ' "authorization expired", self.id, self.hyper, ) <NEW_LINE> return False <NEW_LINE> <DEDENT> pubfn = os.path.join(self.opts["pki_dir"], "minions", self.id) <NEW_LINE> with salt.utils.files.fopen(pubfn, "w+") as fp_: <NEW_LINE> <INDENT> fp_.write(pub) <NEW_LINE> <DEDENT> self.void() <NEW_LINE> return True <NEW_LINE> <DEDENT> def authorize(self): <NEW_LINE> <INDENT> with salt.utils.files.fopen(self.path, "w+") as fp_: <NEW_LINE> <INDENT> fp_.write( str(int(time.time())) ) <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> def void(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> os.unlink(self.path) <NEW_LINE> return True <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> return False | Used to manage key signing requests. | 62599057a8ecb03325872787 |
class TacTemp: <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<TacTemp %s>"%self.name | should be Immutable | 62599057f7d966606f749370 |
class XMarkerTriangle(XMarkerBase): <NEW_LINE> <INDENT> def __init__(self, parent=None): <NEW_LINE> <INDENT> super(XMarkerTriangle, self).__init__(parent) <NEW_LINE> self.update() <NEW_LINE> <DEDENT> def setPath(self): <NEW_LINE> <INDENT> path = QPainterPath() <NEW_LINE> path.moveTo(self._wrs*0.5,self._hrs*0.2) <NEW_LINE> path.lineTo(self._wrs*0.8,self._hrs*0.8) <NEW_LINE> path.lineTo(self._wrs*0.2,self._hrs*0.8) <NEW_LINE> path.closeSubpath() <NEW_LINE> return path | A Triangle | 625990573c8af77a43b689f8 |
class TestScrewer(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.screwer = Screwer('5') <NEW_LINE> <DEDENT> def test_setup_screwer(self): <NEW_LINE> <INDENT> self.assertEqual(self.screwer.mode, '5') <NEW_LINE> <DEDENT> def test_exit_screwer(self): <NEW_LINE> <INDENT> self.assertEqual(self.screwer._end_screwer(), -99) | Tests the Jobset Class | 6259905707d97122c421821b |
class MigrationTest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.ms = gdata.apps.migration.service.MigrationService( email=admin_email, password=admin_password, domain=domain) <NEW_LINE> self.ms.ProgrammaticLogin() <NEW_LINE> <DEDENT> def testImportMail(self): <NEW_LINE> <INDENT> self.ms.ImportMail(user_name=username, mail_message=MESS%('Test subject', 'Test body'), mail_item_properties=['IS_STARRED'], mail_labels=['Test']) <NEW_LINE> <DEDENT> def testBatch(self): <NEW_LINE> <INDENT> for i in xrange(1,10): <NEW_LINE> <INDENT> self.ms.AddBatchEntry(mail_message=MESS%('Test batch %d'%i, 'Test batch'), mail_item_properties=['IS_INBOX'], mail_labels=['Test', 'Batch']) <NEW_LINE> <DEDENT> self.ms.SubmitBatch(user_name=username) | Test for the MigrationService. | 6259905723e79379d538da6c |
class LocalConfig(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def parseConfigFile(self): <NEW_LINE> <INDENT> config = ConfigParser.ConfigParser() <NEW_LINE> cfg_file = os.path.join(getUserFolder(),DEFAULT_CONFIG_FILE_NAME) <NEW_LINE> config.read(cfg_file) <NEW_LINE> self.source_dir = config.get("Properties", SOURCE_DIR_NAME) <NEW_LINE> self.out_dir = config.get("Properties", OUTPUT_DIR_NAME) | Stores the local paths to the datasets | 6259905716aa5153ce401a54 |
class ClipboardGrabFormat(BaseGrabFormat): <NEW_LINE> <INDENT> def _can_read(self, request): <NEW_LINE> <INDENT> if request.mode[1] not in 'i?': <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if request.filename != '<clipboard>': <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return bool(self._init_pillow()) <NEW_LINE> <DEDENT> def _get_data(self, index): <NEW_LINE> <INDENT> ImageGrab = self._init_pillow() <NEW_LINE> assert ImageGrab <NEW_LINE> pil_im = ImageGrab.grabclipboard() <NEW_LINE> if pil_im is None: <NEW_LINE> <INDENT> raise RuntimeError('There seems to be no image data on the ' 'clipboard now.') <NEW_LINE> <DEDENT> im = np.asarray(pil_im) <NEW_LINE> return im, {} | The ClipboardGrabFormat provides a means to grab image data from
the clipboard, using the uri "<clipboard>"
This functionality is provided via Pillow. Note that "<clipboard>" is
only supported on Windows.
Parameters for reading
----------------------
No parameters. | 6259905724f1403a92686387 |
class DenseDilated(nn.Module): <NEW_LINE> <INDENT> def __init__(self, k=9, dilation=1, stochastic=False, epsilon=0.0): <NEW_LINE> <INDENT> super(DenseDilated, self).__init__() <NEW_LINE> self.dilation = dilation <NEW_LINE> self.stochastic = stochastic <NEW_LINE> self.epsilon = epsilon <NEW_LINE> self.k = k <NEW_LINE> <DEDENT> def forward(self, edge_index): <NEW_LINE> <INDENT> if self.stochastic: <NEW_LINE> <INDENT> if torch.rand(1) < self.epsilon and self.training: <NEW_LINE> <INDENT> num = self.k * self.dilation <NEW_LINE> randnum = torch.randperm(num)[:self.k] <NEW_LINE> edge_index = edge_index[:, :, :, randnum] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> edge_index = edge_index[:, :, :, ::self.dilation] <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> edge_index = edge_index[:, :, :, ::self.dilation] <NEW_LINE> <DEDENT> return edge_index | Find dilated neighbor from neighbor list
edge_index: (2, batch_size, num_points, k) | 625990570a50d4780f706877 |
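A minimal sketch of the dilation slicing, assuming the class above is importable; the dummy edge_index tensor and its shape are illustrative only:

    import torch

    # edge_index carries k * dilation candidate neighbors per point; slicing with
    # step `dilation` keeps every d-th one, reducing the last dim from k*d back to k
    edge_index = torch.arange(18).reshape(2, 1, 1, 9)
    dilated = DenseDilated(k=3, dilation=3)
    print(dilated(edge_index).shape)  # torch.Size([2, 1, 1, 3])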
class ImmutableCollection(object): <NEW_LINE> <INDENT> def __init__(self, init_data=None): <NEW_LINE> <INDENT> self.class_name = self.__class__.__name__ <NEW_LINE> if init_data: <NEW_LINE> <INDENT> if isinstance(init_data, dict): <NEW_LINE> <INDENT> self.collection = init_data <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.collection = {} <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.collection = {} <NEW_LINE> <DEDENT> <DEDENT> def map(self, map_dict={}): <NEW_LINE> <INDENT> if not map_dict: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.collection = merge_dict(map_dict, self.collection) <NEW_LINE> return True <NEW_LINE> <DEDENT> <DEDENT> def get(self): <NEW_LINE> <INDENT> def obj_mapper(d): <NEW_LINE> <INDENT> return namedtuple(self.class_name, d.keys())(*d.values()) <NEW_LINE> <DEDENT> data = json.dumps(self.collection) <NEW_LINE> return json.loads(data, object_hook=obj_mapper) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def create(cls, data): <NEW_LINE> <INDENT> return cls(data).get() | Construct an immutable collection from a dictionary. | 625990573eb6a72ae038bbd0 |
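A short usage sketch with made-up data, assuming the class above is importable; create() round-trips the dict through JSON into a namedtuple named after the class, so fields become read-only attributes:

    user = ImmutableCollection.create({"name": "Ada", "id": 1})
    print(user.name, user.id)  # Ada 1
    # attribute assignment raises AttributeError, since namedtuples are immutable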
class ActorCritic(object): <NEW_LINE> <INDENT> def __init__(self, env, inSize, outSize,layers,gamma, lrPi, lrV ): <NEW_LINE> <INDENT> self.inSize = inSize <NEW_LINE> self.outSize = outSize <NEW_LINE> self.pi = Pi(inSize, outSize, layers) <NEW_LINE> self.V = V(inSize, outSize, layers) <NEW_LINE> self.gamma = gamma <NEW_LINE> self.env = env <NEW_LINE> self.loss_V = nn.SmoothL1Loss() <NEW_LINE> self.loss_Pi = nn.CrossEntropyLoss() <NEW_LINE> self.optim_V = torch.optim.Adam(self.V.parameters(), lr = lrV) <NEW_LINE> self.optim_Pi = torch.optim.Adam(self.pi.parameters(), lr = lrPi) <NEW_LINE> <DEDENT> def act(self, observation): <NEW_LINE> <INDENT> self.s = torch.FloatTensor(observation) <NEW_LINE> self.prob_a = self.pi.forward(self.s) <NEW_LINE> self.a = torch.distributions.Categorical(self.prob_a).sample() <NEW_LINE> return self.a.tolist() <NEW_LINE> <DEDENT> def learn(self, ob, reward, done): <NEW_LINE> <INDENT> s = torch.FloatTensor(self.s) <NEW_LINE> a = self.a <NEW_LINE> s1 = torch.FloatTensor(ob) <NEW_LINE> value = self.V.forward(s1) <NEW_LINE> y1 = Variable(reward + self.gamma * self.V.forward(s1).detach(), requires_grad = False) <NEW_LINE> y = self.V.forward(s) <NEW_LINE> loss_V = self.loss_V(y, y1) <NEW_LINE> self.optim_V.zero_grad() <NEW_LINE> loss_V.backward() <NEW_LINE> self.optim_V.step() <NEW_LINE> A = float(reward) + self.gamma*self.V.forward(s1)[a].detach() - self.V.forward(s)[a].detach() <NEW_LINE> A = A.detach() <NEW_LINE> loss_Pi = self.loss_Pi(torch.log(self.pi.forward(s).view(1,self.outSize)), torch.tensor([a])) * A <NEW_LINE> self.optim_Pi.zero_grad() <NEW_LINE> loss_Pi.backward() <NEW_LINE> self.optim_Pi.step() | The world's simplest agent! | 62599057adb09d7d5dc0badb |
class TestLogout(TestCase): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setUpTestData(cls): <NEW_LINE> <INDENT> super(TestLogout, cls).setUpTestData() <NEW_LINE> cls.tester = User.objects.create_user(username='authtester', email='[email protected]', password='password') <NEW_LINE> cls.logout_url = reverse('tcms-logout') <NEW_LINE> <DEDENT> def test_logout_then_redirect_to_next(self): <NEW_LINE> <INDENT> self.client.login(username=self.tester.username, password='password') <NEW_LINE> response = self.client.get(self.logout_url, follow=True) <NEW_LINE> self.assertRedirects(response, reverse('tcms-login')) <NEW_LINE> <DEDENT> def test_logout_then_goto_next(self): <NEW_LINE> <INDENT> self.client.login(username=self.tester.username, password='password') <NEW_LINE> next_url = reverse('plans-all') <NEW_LINE> response = self.client.get(self.logout_url, {'next': next_url}, follow=True) <NEW_LINE> self.assertRedirects(response, next_url) | Test for logout view method | 6259905732920d7e50bc75b7 |
class TaskLoggingConfiguration(_messages.Message): <NEW_LINE> <INDENT> @encoding.MapUnrecognizedFields('additionalProperties') <NEW_LINE> class LogLevelsValue(_messages.Message): <NEW_LINE> <INDENT> class AdditionalProperty(_messages.Message): <NEW_LINE> <INDENT> class ValueValueValuesEnum(_messages.Enum): <NEW_LINE> <INDENT> LEVEL_UNSPECIFIED = 0 <NEW_LINE> ALL = 1 <NEW_LINE> TRACE = 2 <NEW_LINE> DEBUG = 3 <NEW_LINE> INFO = 4 <NEW_LINE> WARN = 5 <NEW_LINE> ERROR = 6 <NEW_LINE> FATAL = 7 <NEW_LINE> OFF = 8 <NEW_LINE> <DEDENT> key = _messages.StringField(1) <NEW_LINE> value = _messages.EnumField('ValueValueValuesEnum', 2) <NEW_LINE> <DEDENT> additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True) <NEW_LINE> <DEDENT> logLevels = _messages.MessageField('LogLevelsValue', 1) | Logging configuration for the task.
Messages:
LogLevelsValue: Map of logger name to log4j log level.
Fields:
logLevels: Map of logger name to log4j log level. | 625990577047854f46340931 |
class MineCraftProxy(Extension): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> name = type(self).__name__ <NEW_LINE> super().__init__(name) <NEW_LINE> self.mc = None <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> message = self.read() <NEW_LINE> topic = message.get('topic') <NEW_LINE> data = message.get("payload") <NEW_LINE> if topic == 'init': <NEW_LINE> <INDENT> self.mc = MineCraft(data) <NEW_LINE> self.publish({"id": 'minecraft', "topic": "sensor", "is_connected": True}) <NEW_LINE> <DEDENT> if self.mc: <NEW_LINE> <INDENT> fn = self.mc.match().get(topic) <NEW_LINE> if fn: <NEW_LINE> <INDENT> res = fn(data) <NEW_LINE> if res.get('topic') == 'sensor': <NEW_LINE> <INDENT> self.publish(res) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.publish({"id": 'minecraft', "topic": "sensor", "is_connected": False}) | After inheriting from Extension you will get:
self.actuator_sub
self.sensor_pub
self.logger | 62599057462c4b4f79dbcf77 |
class SearchDialog(ActionCancelDialog): <NEW_LINE> <INDENT> def __init__(self, parent): <NEW_LINE> <INDENT> ActionCancelDialog.__init__(self, parent, title="Search", actionButtonLabel="Filter") <NEW_LINE> searchLabel = wx.StaticText(self.panel, wx.NewId(), 'Query:') <NEW_LINE> self.query = wx.TextCtrl(self.panel, wx.NewId(), size=(200, -1)) <NEW_LINE> sizer = wx.BoxSizer(wx.HORIZONTAL) <NEW_LINE> sizer.Add(searchLabel, 0, wx.ALL, 5) <NEW_LINE> sizer.Add(self.query, 0, wx.ALL, 5) <NEW_LINE> self.setSizer(sizer) <NEW_LINE> <DEDENT> def getSearchQuery(self): <NEW_LINE> <INDENT> return self.query.GetValue() | The Search dialog.
It allows searching for a query in the list of entries.
@group Events: __on*
@sort: __*, g* | 625990578e71fb1e983bd03b |
class BasicBlock(nn.Module): <NEW_LINE> <INDENT> expansion = 1 <NEW_LINE> __constants__ = ['downsample'] <NEW_LINE> def __init__(self, inplanes, planes, stride=1, downsample=None, groups=1, base_width=64, dilation=1, norm_layer=None): <NEW_LINE> <INDENT> super(BasicBlock, self).__init__() <NEW_LINE> if norm_layer is None: <NEW_LINE> <INDENT> norm_layer = nn.BatchNorm2d <NEW_LINE> <DEDENT> if groups != 1 or base_width != 64: <NEW_LINE> <INDENT> raise ValueError('BasicBlock only supports groups=1 and base_width=64') <NEW_LINE> <DEDENT> if dilation > 1: <NEW_LINE> <INDENT> raise NotImplementedError("Dilation > 1 not supported in BasicBlock") <NEW_LINE> <DEDENT> self.conv1 = conv3x3(inplanes, planes, stride) <NEW_LINE> self.bn1 = norm_layer(planes) <NEW_LINE> self.relu = nn.ReLU(inplace=True) <NEW_LINE> self.conv2 = conv3x3(planes, planes) <NEW_LINE> self.bn2 = norm_layer(planes) <NEW_LINE> self.downsample = downsample <NEW_LINE> self.stride = stride <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> identity = x <NEW_LINE> out = self.conv1(x) <NEW_LINE> out = self.bn1(out) <NEW_LINE> out = self.relu(out) <NEW_LINE> out = self.conv2(out) <NEW_LINE> out = self.bn2(out) <NEW_LINE> if self.downsample is not None: <NEW_LINE> <INDENT> identity = self.downsample(x) <NEW_LINE> <DEDENT> out += identity <NEW_LINE> out = self.relu(out) <NEW_LINE> return out | A wrapup of a residual block | 625990570fa83653e46f6457 |
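A quick shape check, assuming torch is available; BasicBlock expects a conv3x3 helper in its own module, which the row above omits, so the standard torchvision-style definition is shown here for reference:

    import torch
    import torch.nn as nn

    def conv3x3(in_planes, out_planes, stride=1):
        # 3x3 convolution with padding, as in torchvision's resnet module
        return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
                         padding=1, bias=False)

    block = BasicBlock(64, 64)
    x = torch.randn(1, 64, 32, 32)
    print(block(x).shape)  # torch.Size([1, 64, 32, 32]); identity shortcut, no downsample needed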
class ParameterNumberError(Exception): <NEW_LINE> <INDENT> def __init__(self, message, *args): <NEW_LINE> <INDENT> self.message = message <NEW_LINE> super(ParameterNumberError, self).__init__(message, *args) | Raise an error when the number of parameters given by the
user does not correspond to the number of parameters detected
in the symbolic expression of the immittance. | 625990578da39b475be0475e |
class ResultViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = models.Result.objects.all() <NEW_LINE> serializer_class = serializers.ResultSerializer <NEW_LINE> permission_classes = [permissions.IsAuthenticated] | ViewSet for the Result class | 6259905794891a1f408ba1af |
class Article(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'article' <NEW_LINE> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> uid = db.Column(db.Integer, nullable=False, index=True) <NEW_LINE> content = db.Column(db.Text) <NEW_LINE> created = db.Column(db.DateTime, nullable=False) <NEW_LINE> updated = db.Column(db.DateTime, nullable=False) <NEW_LINE> @property <NEW_LINE> def user(self): <NEW_LINE> <INDENT> if not hasattr(self, '_user'): <NEW_LINE> <INDENT> self._user = User.query.get(self.uid) <NEW_LINE> <DEDENT> return self._user | Article | 62599057009cb60464d02aa7
class GodZilla: <NEW_LINE> <INDENT> def __init__(self, name, health=100): <NEW_LINE> <INDENT> self._name = name <NEW_LINE> self._health_points = health <NEW_LINE> self._damage = randint(10, 50) <NEW_LINE> <DEDENT> def attack(self, player): <NEW_LINE> <INDENT> player.lower_hp(self._damage) <NEW_LINE> print("you have lost {} of health".format(self._damage)) <NEW_LINE> if player.is_dead(): <NEW_LINE> <INDENT> print("you are dead") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("you are damaged") <NEW_LINE> <DEDENT> <DEDENT> def lower_hp_monster(self, points=1): <NEW_LINE> <INDENT> self._health_points -= points <NEW_LINE> <DEDENT> def monster_dead(self): <NEW_LINE> <INDENT> return self._health_points <= 0 | A monster should have the following attributes:
* a name
* hit points
* maximum damage they can inflict
A single method:
* an attack method, which accepts the player and deducts
maximum damage from the player's hit points | 6259905776e4537e8c3f0afe
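A runnable sketch of the attack flow, assuming randint is imported in GodZilla's module; the Player class below is a hypothetical stand-in for whatever player object the game passes in (it only needs lower_hp and is_dead):

    class Player:
        # minimal stand-in with the two methods GodZilla.attack relies on
        def __init__(self, hit_points=100):
            self.hit_points = hit_points
        def lower_hp(self, points=1):
            self.hit_points -= points
        def is_dead(self):
            return self.hit_points <= 0

    monster = GodZilla("Rex")
    monster.attack(Player())  # prints the damage dealt and whether you survived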
class NonNoisyImages(Dataset): <NEW_LINE> <INDENT> def __init__(self, img_path, transform=None): <NEW_LINE> <INDENT> self.filelist = create_non_noisy_filelist(img_path) <NEW_LINE> self.transform = transform <NEW_LINE> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> img = Image.open(self.filelist[index]) <NEW_LINE> if self.transform is not None: <NEW_LINE> <INDENT> img = self.transform(img) <NEW_LINE> <DEDENT> return img <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.filelist) | Dataset for reference images to be noised on the fly in the network. | 6259905707d97122c421821d |
class TransportError(Exception): <NEW_LINE> <INDENT> def __init__(self, source, err, msg=''): <NEW_LINE> <INDENT> self.source = source <NEW_LINE> if isinstance(err, socket.error): <NEW_LINE> <INDENT> self.errno, self.msg = err.args <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.errno = err <NEW_LINE> self.msg = msg <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "%s: [%d] %s" % (self.__class__.__name__, self.errno, self.msg) | Transport related errors. | 62599057ac7a0e7691f73a53 |
class VirtualMachineError(Exception): <NEW_LINE> <INDENT> pass | Virtual machine error exception. | 62599057596a897236129069 |
class Breadcrumb(object): <NEW_LINE> <INDENT> def __init__(self, name, url): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.url = url <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.__unicode__() <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return u"%s,%s" % (self.name, self.url) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return u"Breadcrumb <%s,%s>" % (self.name, self.url) | Breadcrumb can have methods to customize the breadcrumb object; the Breadcrumbs
class sends us the name and url. | 62599057e76e3b2f99fd9f71
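For illustration, the two dunder methods render as follows (values are made up):

    crumb = Breadcrumb("Home", "/")
    print(crumb)        # Home,/
    print(repr(crumb))  # Breadcrumb <Home,/>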
class ExplorationStats(object): <NEW_LINE> <INDENT> def __init__( self, exp_id, exp_version, num_actual_starts, num_completions, state_stats_mapping): <NEW_LINE> <INDENT> self.exp_id = exp_id <NEW_LINE> self.exp_version = exp_version <NEW_LINE> self.num_actual_starts = num_actual_starts <NEW_LINE> self.num_completions = num_completions <NEW_LINE> self.state_stats_mapping = state_stats_mapping <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> exploration_stats_dict = { 'exp_id': self.exp_id, 'exp_version': self.exp_version, 'num_actual_starts': self.num_actual_starts, 'num_completions': self.num_completions, 'state_stats_mapping': self.state_stats_mapping } <NEW_LINE> return exploration_stats_dict <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> if not isinstance(self.exp_id, basestring): <NEW_LINE> <INDENT> raise utils.ValidationError( 'Expected exp_id to be a string, received %s' % (self.exp_id)) <NEW_LINE> <DEDENT> if not isinstance(self.exp_version, int): <NEW_LINE> <INDENT> raise utils.ValidationError( 'Expected exp_version to be an int, received %s' % ( self.exp_version)) <NEW_LINE> <DEDENT> if not isinstance(self.num_actual_starts, int): <NEW_LINE> <INDENT> raise utils.ValidationError( 'Expected num_actual_starts to be an int, received %s' % ( self.num_actual_starts)) <NEW_LINE> <DEDENT> if self.num_actual_starts < 0: <NEW_LINE> <INDENT> raise utils.ValidationError( '%s cannot have negative values' % ('num_actual_starts')) <NEW_LINE> <DEDENT> if not isinstance(self.num_completions, int): <NEW_LINE> <INDENT> raise utils.ValidationError( 'Expected num_completions to be an int, received %s' % ( self.num_completions)) <NEW_LINE> <DEDENT> if self.num_completions < 0: <NEW_LINE> <INDENT> raise utils.ValidationError( '%s cannot have negative values' % ('num_completions')) <NEW_LINE> <DEDENT> if not isinstance(self.state_stats_mapping, dict): <NEW_LINE> <INDENT> raise utils.ValidationError( 'Expected state_stats_mapping to be a dict, received %s' % ( self.state_stats_mapping)) | Domain object representing analytics data for an exploration. | 6259905724f1403a92686388 |
class UserSchema(ma.Schema): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = UserModel <NEW_LINE> fields = ("username", "password", "id") <NEW_LINE> load_only = ( "password", ) <NEW_LINE> dump_only = ("id",) | Schema for dump/load user model. | 62599057adb09d7d5dc0badd |
class GratisDNSProviderTests(TestCase, IntegrationTestsV2): <NEW_LINE> <INDENT> Provider = Provider <NEW_LINE> provider_name = "gratisdns" <NEW_LINE> domain = "denisa.dk" <NEW_LINE> gratisdns_session = "0123456789abcdef0123456789abcdef" <NEW_LINE> def _filter_post_data_parameters(self): <NEW_LINE> <INDENT> return ["login", "password"] <NEW_LINE> <DEDENT> def _filter_headers(self): <NEW_LINE> <INDENT> return ["Cookie"] <NEW_LINE> <DEDENT> def _replace_auth(self, cookie): <NEW_LINE> <INDENT> cookie = re.sub("ORGID=.*;", f"ORGID={self.gratisdns_session};", cookie) <NEW_LINE> return cookie <NEW_LINE> <DEDENT> def _filter_response(self, response): <NEW_LINE> <INDENT> if "basestring" not in globals(): <NEW_LINE> <INDENT> basestring = str <NEW_LINE> <DEDENT> if "set-cookie" in response["headers"]: <NEW_LINE> <INDENT> if isinstance(response["headers"]["set-cookie"], basestring): <NEW_LINE> <INDENT> response["headers"]["set-cookie"] = self._replace_auth( response["headers"]["set-cookie"] ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for i, cookie in enumerate(response["headers"]["set-cookie"]): <NEW_LINE> <INDENT> response["headers"]["set-cookie"][i] = self._replace_auth(cookie) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return response | TestCase for GratisDNS | 6259905745492302aabfda4b |
class TestCompareXLSXFiles(ExcelComparisonTest): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.maxDiff = None <NEW_LINE> filename = 'chart_axis33.xlsx' <NEW_LINE> test_dir = 'xlsxwriter/test/comparison/' <NEW_LINE> self.got_filename = test_dir + '_test_' + filename <NEW_LINE> self.exp_filename = test_dir + 'xlsx_files/' + filename <NEW_LINE> self.ignore_files = [] <NEW_LINE> self.ignore_elements = {} <NEW_LINE> <DEDENT> def test_create_file(self): <NEW_LINE> <INDENT> workbook = Workbook(self.got_filename) <NEW_LINE> worksheet = workbook.add_worksheet() <NEW_LINE> chart = workbook.add_chart({'type': 'line'}) <NEW_LINE> chart.axis_ids = [68827008, 68898816] <NEW_LINE> data = [ [1, 2, 3, 4, 5], [2, 4, 6, 8, 10], [3, 6, 9, 12, 15], ] <NEW_LINE> worksheet.write_column('A1', data[0]) <NEW_LINE> worksheet.write_column('B1', data[1]) <NEW_LINE> worksheet.write_column('C1', data[2]) <NEW_LINE> chart.add_series({'values': '=Sheet1!$A$1:$A$5'}) <NEW_LINE> chart.add_series({'values': '=Sheet1!$B$1:$B$5'}) <NEW_LINE> chart.add_series({'values': '=Sheet1!$C$1:$C$5'}) <NEW_LINE> chart.set_x_axis({'name': 'XXX', 'name_font': {'rotation': -45, 'baseline': -1}}) <NEW_LINE> chart.set_y_axis({'name': 'YYY', 'name_font': {'rotation': -45, 'baseline': -1}}) <NEW_LINE> worksheet.insert_chart('E9', chart) <NEW_LINE> workbook.close() <NEW_LINE> self.assertExcelEqual() | Test file created by XlsxWriter against a file created by Excel. | 6259905729b78933be26ab7e |
class WeaponDatabase(object): <NEW_LINE> <INDENT> def __len__(self): <NEW_LINE> <INDENT> return _weapon_scripts._length <NEW_LINE> <DEDENT> def __getitem__(self, item): <NEW_LINE> <INDENT> for info in self: <NEW_LINE> <INDENT> if info.class_name != item: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> return info <NEW_LINE> <DEDENT> raise NameError('"{0}" is not a valid weapon name.'.format(item)) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> for index in range(_weapon_scripts._length): <NEW_LINE> <INDENT> yield make_object(WeaponInfo, _weapon_scripts._find(index)) | WeaponDatabase accessor class. | 62599057a17c0f6771d5d65a |
class ComputeForwardingRulesAggregatedListRequest(_messages.Message): <NEW_LINE> <INDENT> filter = _messages.StringField(1) <NEW_LINE> maxResults = _messages.IntegerField(2, variant=_messages.Variant.UINT32, default=500) <NEW_LINE> pageToken = _messages.StringField(3) <NEW_LINE> project = _messages.StringField(4, required=True) | A ComputeForwardingRulesAggregatedListRequest object.
Fields:
filter: Sets a filter expression for filtering listed resources, in the
form filter={expression}. Your {expression} must be in the format:
FIELD_NAME COMPARISON_STRING LITERAL_STRING. The FIELD_NAME is the name
of the field you want to compare. Only atomic field types are supported
(string, number, boolean). The COMPARISON_STRING must be either eq
(equals) or ne (not equals). The LITERAL_STRING is the string value to
filter to. The literal value must be valid for the type of field
(string, number, boolean). For string fields, the literal value is
interpreted as a regular expression using RE2 syntax. The literal value
must match the entire field. For example, filter=name ne example-
instance.
maxResults: Maximum count of results to be returned.
pageToken: Specifies a page token to use. Use this parameter if you want
to list the next page of results. Set pageToken to the nextPageToken
returned by a previous list request.
project: Name of the project scoping this request. | 62599057e5267d203ee6ce62 |
class WMS13GetMapGIFDatasetTestCase(wmsbase.WMS13GetMapTestCase): <NEW_LINE> <INDENT> layers = ("mosaic_MER_FRS_1PNPDE20060822_092058_000001972050_00308_23408_0077_RGB_reduced",) <NEW_LINE> bbox = (8.5, 32.2, 25.4, 46.3) <NEW_LINE> frmt = "image/gif" | Test a GetMap request with a dataset series. | 6259905715baa72349463506 |
@gin.configurable <NEW_LINE> class TransparentEncDecAttention(EncDecAttention): <NEW_LINE> <INDENT> def __init__(self, layers_per_encoder_module=gin.REQUIRED, layers_per_decoder_module=gin.REQUIRED, encoder_num_modules=gin.REQUIRED, decoder_num_modules=gin.REQUIRED, dropout_rate=0.0, **kwargs): <NEW_LINE> <INDENT> super(TransparentEncDecAttention, self).__init__(**kwargs) <NEW_LINE> self.layers_per_encoder_module = layers_per_encoder_module <NEW_LINE> self.layers_per_decoder_module = layers_per_decoder_module <NEW_LINE> self.encoder_num_modules = encoder_num_modules <NEW_LINE> self.decoder_num_modules = decoder_num_modules <NEW_LINE> self.dropout_rate = dropout_rate <NEW_LINE> <DEDENT> def _get_memory_antecedent(self, context): <NEW_LINE> <INDENT> decoder_module_index = context.layer_index // self.layers_per_decoder_module <NEW_LINE> decoder_inputs = self._get_decoder_inputs(context) <NEW_LINE> return decoder_inputs[decoder_module_index] <NEW_LINE> <DEDENT> def _get_decoder_inputs(self, context): <NEW_LINE> <INDENT> if hasattr(context, "decoder_layers_per_module"): <NEW_LINE> <INDENT> return context.decoder_layers_per_module <NEW_LINE> <DEDENT> encoder_layer_outputs = [ mtf.layers.rename_length_to_memory_length(output) for output in context.encoder_layer_outputs ] <NEW_LINE> layers_per_module = self.layers_per_encoder_module <NEW_LINE> encoder_module_outputs_dim = mtf.Dimension( "encoder_module_outputs", size=self.encoder_num_modules + 1) <NEW_LINE> decoder_module_inputs_dim = mtf.Dimension( "decoder_module_inputs", size=self.decoder_num_modules) <NEW_LINE> encoder_module_outputs = mtf.stack( [encoder_layer_outputs[0]] + encoder_layer_outputs[layers_per_module::layers_per_module], dim_name="encoder_module_outputs") <NEW_LINE> stddev = 1.0 <NEW_LINE> if not mtf.layers.unit_scaling_convention(): <NEW_LINE> <INDENT> stddev *= encoder_module_outputs_dim.size ** -0.5 <NEW_LINE> <DEDENT> w = mtf.get_variable( context.mesh, "w", mtf.Shape([encoder_module_outputs_dim, decoder_module_inputs_dim]), initializer=tf.random_normal_initializer(stddev=stddev), dtype=context.variable_dtype) <NEW_LINE> w = mtf.dropout(w, context.train, 1.0 - self.dropout_rate) <NEW_LINE> s = mtf.softmax(w, reduced_dim=encoder_module_outputs_dim) <NEW_LINE> z = mtf.layers.us_einsum([s, encoder_module_outputs], reduced_dims=[encoder_module_outputs_dim]) <NEW_LINE> input_per_decoder = mtf.split( z, split_dim=decoder_module_inputs_dim, num_or_size_splits=decoder_module_inputs_dim.size) <NEW_LINE> context.decoder_layers_per_module = [ mtf.reshape(inpt, z.shape.dims[1:]) for inpt in input_per_decoder ] <NEW_LINE> return context.decoder_layers_per_module | Transparent multi-head attention over encoder output. | 62599057dc8b845886d54b39 |
class sortdict(collections.OrderedDict): <NEW_LINE> <INDENT> def __setitem__(self, key, value): <NEW_LINE> <INDENT> if key in self: <NEW_LINE> <INDENT> del self[key] <NEW_LINE> <DEDENT> super(sortdict, self).__setitem__(key, value) <NEW_LINE> <DEDENT> if pycompat.ispypy: <NEW_LINE> <INDENT> def update(self, src): <NEW_LINE> <INDENT> if isinstance(src, dict): <NEW_LINE> <INDENT> src = src.iteritems() <NEW_LINE> <DEDENT> for k, v in src: <NEW_LINE> <INDENT> self[k] = v | a simple sorted dictionary
>>> d1 = sortdict([(b'a', 0), (b'b', 1)])
>>> d2 = d1.copy()
>>> d2
sortdict([('a', 0), ('b', 1)])
>>> d2.update([(b'a', 2)])
>>> list(d2.keys()) # should still be in last-set order
['b', 'a'] | 62599057498bea3a75a5909a |
class SCAError(Exception): <NEW_LINE> <INDENT> pass | Error class for everything SCA-related (e.g. invalid rules or categories) | 6259905707d97122c421821e |
class Window(object): <NEW_LINE> <INDENT> def __init__(self, windowID): <NEW_LINE> <INDENT> self._display = Display() <NEW_LINE> self._root = self._display.screen().root <NEW_LINE> self._window = self._display.create_resource_object('window', windowID) <NEW_LINE> <DEDENT> def reserve_space(self, left=0, right=0, top=0, bottom=0): <NEW_LINE> <INDENT> LEFT = left <NEW_LINE> RIGHT = right <NEW_LINE> TOP = top <NEW_LINE> BOTTOM = bottom <NEW_LINE> self._window.change_property(self._display.intern_atom('_NET_WM_STRUT'), self._display.intern_atom('CARDINAL'), 32, [LEFT, RIGHT, TOP, BOTTOM]) <NEW_LINE> self._display.sync() <NEW_LINE> <DEDENT> def set_wm_state_skip_taskbar(self): <NEW_LINE> <INDENT> self._window.set_wm_state(Display().intern_atom('_NET_WM_STATE_SKIP_TASKBAR')) | Abstract object representing the X Window of an application
obtained with the window ID. | 62599057a219f33f346c7d79 |
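A hedged usage sketch: python-xlib and a running X server are required, and the window ID below is a placeholder for a real X11 window ID (e.g. from a GTK window's get_xid()):

    panel = Window(0x3200007)    # hypothetical X window ID
    panel.reserve_space(top=24)  # ask the WM to keep a 24px strip free at the top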
class TestModule(unittest.TestCase): <NEW_LINE> <INDENT> def test_configure(self): <NEW_LINE> <INDENT> here = os.path.abspath(os.path.dirname(__file__)) <NEW_LINE> dirs = [os.path.join(here, 'templates')] <NEW_LINE> configure(dirs) <NEW_LINE> self.assertIsInstance(config.engine, MustacheEngine, 'config.engine is invalid') <NEW_LINE> self.assertEqual(config.engine.directories, dirs, 'config.engine.directories is invalid') <NEW_LINE> self.assertEqual(config.engine.renderer.search_dirs, dirs, 'config.engine.search_dirs is invalid') | Test the formalchemy_mustache module. | 625990574428ac0f6e659aaf |
class AbsLoss(LossSelfOperator): <NEW_LINE> <INDENT> @property <NEW_LINE> def _symbol(self): <NEW_LINE> <INDENT> return sympy.Symbol("|{}|".format(self.loss1.loss_text)) <NEW_LINE> <DEDENT> def forward(self, x_dict={}, **kwargs): <NEW_LINE> <INDENT> loss, x_dict = self.loss1.eval(x_dict, return_dict=True, return_all=False, **kwargs) <NEW_LINE> return loss.abs(), x_dict | Apply the `abs` operation to the loss.
Examples
--------
>>> import torch
>>> from pixyz.distributions import Normal
>>> from pixyz.losses import LogProb
>>> p = Normal(loc=torch.tensor(0.), scale=torch.tensor(1.), var=["x"],
... features_shape=[10])
>>> loss_cls = LogProb(p).abs() # equals to AbsLoss(LogProb(p))
>>> print(loss_cls)
|\log p(x)|
>>> sample_x = torch.randn(2, 10) # Pseudo data
>>> loss = loss_cls.eval({"x": sample_x})
>>> print(loss) # doctest: +SKIP
tensor([12.9894, 15.5280]) | 62599057097d151d1a2c25df |
class S3ScenarioTaskModel(S3Model): <NEW_LINE> <INDENT> names = ["scenario_task"] <NEW_LINE> def model(self): <NEW_LINE> <INDENT> T = current.T <NEW_LINE> s3 = current.response.s3 <NEW_LINE> scenario_id = self.scenario_scenario_id <NEW_LINE> task_id = self.project_task_id <NEW_LINE> tablename = "scenario_task" <NEW_LINE> table = self.define_table(tablename, scenario_id(), task_id(), *s3.meta_fields()) <NEW_LINE> s3.crud_strings[tablename] = Storage( title_create = T("Add Task"), title_display = T("Task Details"), title_list = T("Tasks"), title_update = T("Edit Task"), title_search = T("Search Tasks"), subtitle_create = T("Add New Task"), label_list_button = T("List Tasks"), label_create_button = T("Add Task"), label_delete_button = T("Remove Task from this scenario"), msg_record_created = T("Task added"), msg_record_modified = T("Task updated"), msg_record_deleted = T("Task removed"), msg_list_empty = T("No Tasks currently registered in this scenario")) <NEW_LINE> return Storage() | Scenario Tasks Model | 625990573539df3088ecd81b |
class TerminusDeleteWordCommand(sublime_plugin.TextCommand): <NEW_LINE> <INDENT> def run(self, edit, forward=False): <NEW_LINE> <INDENT> view = self.view <NEW_LINE> terminal = Terminal.from_id(view.id()) <NEW_LINE> if not terminal: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if len(view.sel()) != 1 or not view.sel()[0].empty(): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if forward: <NEW_LINE> <INDENT> pt = view.sel()[0].end() <NEW_LINE> line = view.line(pt) <NEW_LINE> text = view.substr(sublime.Region(pt, line.end())) <NEW_LINE> match = re.search(r"(?<=\w)\b", text) <NEW_LINE> if match: <NEW_LINE> <INDENT> n = match.span()[0] <NEW_LINE> n = n if n > 0 else 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> n = 1 <NEW_LINE> <DEDENT> delete_code = get_key_code("delete") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pt = view.sel()[0].end() <NEW_LINE> line = view.line(pt) <NEW_LINE> text = view.substr(sublime.Region(line.begin(), pt)) <NEW_LINE> matches = list(re.finditer(r"\b(?=\w)", text)) <NEW_LINE> if matches: <NEW_LINE> <INDENT> for match in matches: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> n = view.rowcol(pt)[1] - match.span()[0] <NEW_LINE> n if n > 0 else 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> n = 1 <NEW_LINE> <DEDENT> delete_code = get_key_code("backspace") <NEW_LINE> <DEDENT> self.view.run_command("terminus_show_cursor") <NEW_LINE> terminal.send_string(delete_code * n) | On Windows, ctrl+backspace and ctrl+delete are used to delete words
However, there is no standard key code to delete a word, so
a workaround is to repeatedly apply backspace to delete the word. | 6259905794891a1f408ba1b0
class Links(object): <NEW_LINE> <INDENT> def __init__(self, collection): <NEW_LINE> <INDENT> self.collection = list(collection) <NEW_LINE> self.index = len(self.collection) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return map(lambda r: Link(r), self.collection) <NEW_LINE> <DEDENT> def route_for(self, rel): <NEW_LINE> <INDENT> return Link( next(filter(lambda r: Link(r).rel() == rel, self.collection))) <NEW_LINE> <DEDENT> def next(self): <NEW_LINE> <INDENT> if self.index == 0: <NEW_LINE> <INDENT> raise StopIteration <NEW_LINE> <DEDENT> self.index = self.index - 1 <NEW_LINE> return self.collection[self.index] <NEW_LINE> <DEDENT> def find_by_rel(self, rel): <NEW_LINE> <INDENT> for record in self.collection: <NEW_LINE> <INDENT> if Link(record).rel() == rel: <NEW_LINE> <INDENT> return record <NEW_LINE> <DEDENT> <DEDENT> raise LinkNotFoundError <NEW_LINE> <DEDENT> def to_csv(self): <NEW_LINE> <INDENT> headings = ['Name', 'Url'] <NEW_LINE> data = [r.to_csv() for r in self] <NEW_LINE> data.insert(0, headings) <NEW_LINE> return data | Links
===============
This returns an instance of the Links domain model | 62599057ac7a0e7691f73a55 |
class MarkdownContext: <NEW_LINE> <INDENT> def __init__(self, size=6): <NEW_LINE> <INDENT> self.size = size <NEW_LINE> self._arr = [None] * size <NEW_LINE> <DEDENT> def clear(self, idx=0): <NEW_LINE> <INDENT> self._arr[idx:] = [None] * (self.size - idx) <NEW_LINE> <DEDENT> def insert(self, idx, val): <NEW_LINE> <INDENT> print("CONTEXT.INSERT: [%d]%s; %s" % (idx, val, str(self._arr))) <NEW_LINE> self._arr[idx - 1] = val <NEW_LINE> self.clear(idx) <NEW_LINE> <DEDENT> def get(self, idx=None): <NEW_LINE> <INDENT> if idx: <NEW_LINE> <INDENT> return self._arr[idx - 1] <NEW_LINE> <DEDENT> return {x: None for x in self._arr if x is not None} | Manage depth-based context from header tags in markdown.
Example Markdown:
# Header1
## Header2
#### Header4
Represents as the following internally:
[Header1, Header2, None, Header4, None, None]
Inserting clears the subsequent levels;
thus, inserting at Header3 clears the array values of 3, 4, 5. | 62599057e76e3b2f99fd9f73 |
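A walkthrough of the docstring's example, assuming the class above; note that insert() also prints a CONTEXT.INSERT debug line on each call:

    ctx = MarkdownContext()
    ctx.insert(1, "Header1")
    ctx.insert(2, "Header2")
    ctx.insert(4, "Header4")
    print(ctx.get(2))      # Header2
    print(ctx.get())       # {'Header1': None, 'Header2': None, 'Header4': None}
    ctx.insert(2, "New2")  # clears levels 3-6, dropping Header4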
class VariantCondition(object): <NEW_LINE> <INDENT> __slots__ = ( '_user_id', '_variation_key', ) <NEW_LINE> @property <NEW_LINE> def user_id(self): <NEW_LINE> <INDENT> return self._user_id <NEW_LINE> <DEDENT> @user_id.setter <NEW_LINE> def user_id(self, value): <NEW_LINE> <INDENT> self._user_id = msgbuffers.validate_string( 'VariantCondition.user_id', value, 255) <NEW_LINE> <DEDENT> @property <NEW_LINE> def variation_key(self): <NEW_LINE> <INDENT> return self._variation_key <NEW_LINE> <DEDENT> @variation_key.setter <NEW_LINE> def variation_key(self, value): <NEW_LINE> <INDENT> self._variation_key = msgbuffers.validate_string( 'VariantCondition.variation_key', value, 255) <NEW_LINE> <DEDENT> def __init__(self, user_id='', variation_key=''): <NEW_LINE> <INDENT> self.user_id = user_id <NEW_LINE> self.variation_key = variation_key <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def unpack(cls, buffer): <NEW_LINE> <INDENT> reader = msgbuffers.BinaryReader(buffer) <NEW_LINE> value = cls.unpack_from(reader) <NEW_LINE> if reader.tell() != len(reader): <NEW_LINE> <INDENT> raise msgbuffers.ReadError( ('VariantCondition.unpack received a buffer of length {length}, ' + 'but only {position} bytes were read.').format( length=len(reader), position=reader.tell())) <NEW_LINE> <DEDENT> return value <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def unpack_from(cls, reader): <NEW_LINE> <INDENT> _user_id = reader.read_string('B') <NEW_LINE> _variation_key = reader.read_string('B') <NEW_LINE> return cls(_user_id, _variation_key) <NEW_LINE> <DEDENT> def pack(self): <NEW_LINE> <INDENT> writer = msgbuffers.BinaryWriter() <NEW_LINE> self.pack_to(writer) <NEW_LINE> return writer.dumps() <NEW_LINE> <DEDENT> def pack_to(self, writer): <NEW_LINE> <INDENT> writer.write_string(self._user_id, 'B') <NEW_LINE> writer.write_string(self._variation_key, 'B') <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if type(self) is type(other): <NEW_LINE> <INDENT> return (self._user_id == other._user_id and self._variation_key == other._variation_key) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return NotImplemented <NEW_LINE> <DEDENT> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> if type(self) is type(other): <NEW_LINE> <INDENT> return not self.__eq__(other) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return NotImplemented <NEW_LINE> <DEDENT> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return (msgbuffers.size_string(self._user_id, 'B') + msgbuffers.size_string(self._variation_key, 'B')) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '{type}(user_id={user_id}, variation_key={variation_key})'.format( type=type(self).__name__, user_id=msgbuffers.shorten_string(self._user_id), variation_key=msgbuffers.shorten_string(self._variation_key)) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '{type}(user_id={user_id}, variation_key={variation_key})'.format( type=type(self).__name__, user_id=repr(self._user_id), variation_key=repr(self._variation_key)) | Generated message-passing structure. | 62599057b57a9660fecd2ff0 |
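A pack/unpack round trip, assuming the generated msgbuffers runtime this structure depends on is importable; the field values are made up:

    cond = VariantCondition(user_id="user-1", variation_key="treatment")
    data = cond.pack()                            # serialize to bytes
    assert VariantCondition.unpack(data) == cond  # lossless round trip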
class Unknown(Variable): <NEW_LINE> <INDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<unknown>' | A variable which type is unknown. | 625990574e4d56256637397c |
class ItsiSession(OAuth2Session): <NEW_LINE> <INDENT> def get_user(self): <NEW_LINE> <INDENT> user_json = self.get(USER_JSON_URL) <NEW_LINE> return json.loads(user_json.content) | An OAuth 2.0 Session container for ITSI Portal.
This is a simple wrapper around the OAuth2Session class, and provides a
convenience method for getting user data from ITSI Portal.
Documentation for OAuth2Session can be found here:
https://rauth.readthedocs.org/en/latest/api/#oauth-2-0-sessions | 6259905707d97122c421821f |
class FinishCommand(BaseFinishCommand, PivotalTrackerCommand): <NEW_LINE> <INDENT> def _merge_branch(self, branch, *args): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.git.get_branch(branch) <NEW_LINE> self.story <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> self.git.get_branch(self.branch) <NEW_LINE> <DEDENT> message = "[finish #{0:d}] Merge branch '{1}'".format(self.story.id, branch) <NEW_LINE> self.git.merge_branch(branch, message, args) <NEW_LINE> <DEDENT> def finalize(self): <NEW_LINE> <INDENT> if self.story.type == Story.TYPE_CHORE: <NEW_LINE> <INDENT> state = Story.STATE_ACCEPTED <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> state = Story.STATE_FINISHED <NEW_LINE> <DEDENT> self.pt.set_story(self.project, self.story, state) <NEW_LINE> puts("Finished story #{0:d}.".format(self.story.id)) <NEW_LINE> super(FinishCommand, self).finalize() | Finish a story branch.
| 6259905724f1403a92686389 |
class JobPrePostDispatcher(Dispatcher): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(JobPrePostDispatcher, self).__init__('avocado.plugins.job.prepost') | Calls extensions before Job execution
Automatically adds all the extensions with entry points registered under
'avocado.plugins.job.prepost' | 62599057d53ae8145f9199d7 |
class GRPCTestServerStub(object): <NEW_LINE> <INDENT> def __init__(self, channel): <NEW_LINE> <INDENT> self.SimpleMethod = channel.unary_unary( "/GRPCTestServer/SimpleMethod", request_serializer=test__server__pb2.Request.SerializeToString, response_deserializer=test__server__pb2.Response.FromString, ) <NEW_LINE> self.ClientStreamingMethod = channel.stream_unary( "/GRPCTestServer/ClientStreamingMethod", request_serializer=test__server__pb2.Request.SerializeToString, response_deserializer=test__server__pb2.Response.FromString, ) <NEW_LINE> self.ServerStreamingMethod = channel.unary_stream( "/GRPCTestServer/ServerStreamingMethod", request_serializer=test__server__pb2.Request.SerializeToString, response_deserializer=test__server__pb2.Response.FromString, ) <NEW_LINE> self.BidirectionalStreamingMethod = channel.stream_stream( "/GRPCTestServer/BidirectionalStreamingMethod", request_serializer=test__server__pb2.Request.SerializeToString, response_deserializer=test__server__pb2.Response.FromString, ) | Missing associated documentation comment in .proto file | 625990573eb6a72ae038bbd4 |
class WKBSpatialElement(SpatialElement, expression.Function): <NEW_LINE> <INDENT> def __init__(self, desc, srid=4326, geometry_type='GEOMETRY'): <NEW_LINE> <INDENT> assert isinstance(desc, (six.binary_type, buffer)) <NEW_LINE> self.desc = desc <NEW_LINE> self.srid = srid <NEW_LINE> self.geometry_type = geometry_type <NEW_LINE> expression.Function.__init__(self, "") | Represents a Geometry value as expressed in the OGC Well
Known Binary (WKB) format.
Extends expression.Function so that in a SQL expression context the value
is interpreted as 'GeomFromWKB(value)' or as the equivalent function in the
currently used database. | 625990578da39b475be04761
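A construction sketch, assuming the sqlalchemy/six imports the class relies on are in place; the hex string is the little-endian WKB encoding of POINT(1 1):

    wkb = bytes.fromhex("0101000000000000000000f03f000000000000f03f")  # POINT(1 1)
    geom = WKBSpatialElement(wkb, srid=4326, geometry_type="POINT")
    # in a SQL expression context this is interpreted as GeomFromWKB(:param)
    # or the equivalent function of the database in use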
class Solution(object): <NEW_LINE> <INDENT> def removeElement(self, nums, val): <NEW_LINE> <INDENT> old_size = len(nums) <NEW_LINE> new_index = 0 <NEW_LINE> for curr_index in range(old_size): <NEW_LINE> <INDENT> curr_val = nums[curr_index] <NEW_LINE> if curr_val != val: <NEW_LINE> <INDENT> nums[new_index] = curr_val <NEW_LINE> new_index += 1 <NEW_LINE> <DEDENT> <DEDENT> return new_index | idea:
2 index pointers:
new_index, points at the end of the new array
curr_index, walks through the array
return new_index | 62599057d486a94d0ba2d53e |
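The two-pointer idea in action; valid entries live in nums[:new_index] after the call:

    nums = [3, 2, 2, 3]
    k = Solution().removeElement(nums, 3)
    print(k, nums[:k])  # 2 [2, 2]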
class NumBinner(BaseEstimator, TransformerMixin): <NEW_LINE> <INDENT> def __init__(self, columns_to_bin, cat_columns, num_columns): <NEW_LINE> <INDENT> self.columns_to_bin = columns_to_bin <NEW_LINE> self.cat_columns = cat_columns <NEW_LINE> self.num_columns = num_columns <NEW_LINE> <DEDENT> def transform(self, data): <NEW_LINE> <INDENT> for col in self.columns_to_bin: <NEW_LINE> <INDENT> bins = [data[col].quantile(x/100) for x in range(0, 101, 25)] <NEW_LINE> data[col] = pd.cut(data[col].values, bins) <NEW_LINE> data[col] = data[col].astype('object') <NEW_LINE> self.cat_columns.append(col) <NEW_LINE> self.num_columns.remove(col) <NEW_LINE> <DEDENT> return self.cat_columns, self.num_columns, data <NEW_LINE> <DEDENT> def fit(self, *_): <NEW_LINE> <INDENT> return self | Binning data in the specified numerical columns into uniform quartile bins (quantile edges at 0, 0.25, 0.5, 0.75 and 1).
Usage example:
`new_cat_cols, new_num_cols, modified_df = NumBinner(columns_to_bin, cat_cols, num_cols).fit_transform(df)` | 6259905729b78933be26ab7f |
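A small end-to-end demo with made-up data; note that pd.cut's bins are left-open, so the column minimum falls outside the lowest interval and becomes NaN:

    import pandas as pd

    df = pd.DataFrame({"age": range(1, 101), "city": ["a"] * 100})
    cat_cols, num_cols, binned = NumBinner(["age"], ["city"], ["age"]).fit_transform(df)
    print(cat_cols, num_cols)       # ['city', 'age'] []
    print(binned["age"].nunique())  # 4 quartile intervals (plus NaN for the minimum)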
class ProjectsLocationsModelsService(base_api.BaseApiService): <NEW_LINE> <INDENT> _NAME = u'projects_locations_models' <NEW_LINE> def __init__(self, client): <NEW_LINE> <INDENT> super(SpeechV1p1beta1.ProjectsLocationsModelsService, self).__init__(client) <NEW_LINE> self._upload_configs = { } <NEW_LINE> <DEDENT> def Create(self, request, global_params=None): <NEW_LINE> <INDENT> config = self.GetMethodConfig('Create') <NEW_LINE> return self._RunMethod( config, request, global_params=global_params) <NEW_LINE> <DEDENT> Create.method_config = lambda: base_api.ApiMethodInfo( flat_path=u'v1p1beta1/projects/{projectsId}/locations/{locationsId}/models', http_method=u'POST', method_id=u'speech.projects.locations.models.create', ordered_params=[u'parent'], path_params=[u'parent'], query_params=[u'name'], relative_path=u'v1p1beta1/{+parent}/models', request_field=u'model', request_type_name=u'SpeechProjectsLocationsModelsCreateRequest', response_type_name=u'Operation', supports_download=False, ) <NEW_LINE> def Deploy(self, request, global_params=None): <NEW_LINE> <INDENT> config = self.GetMethodConfig('Deploy') <NEW_LINE> return self._RunMethod( config, request, global_params=global_params) <NEW_LINE> <DEDENT> Deploy.method_config = lambda: base_api.ApiMethodInfo( flat_path=u'v1p1beta1/projects/{projectsId}/locations/{locationsId}/models/{modelsId}:deploy', http_method=u'POST', method_id=u'speech.projects.locations.models.deploy', ordered_params=[u'name'], path_params=[u'name'], query_params=[], relative_path=u'v1p1beta1/{+name}:deploy', request_field=u'deployModelRequest', request_type_name=u'SpeechProjectsLocationsModelsDeployRequest', response_type_name=u'Operation', supports_download=False, ) <NEW_LINE> def Evaluate(self, request, global_params=None): <NEW_LINE> <INDENT> config = self.GetMethodConfig('Evaluate') <NEW_LINE> return self._RunMethod( config, request, global_params=global_params) <NEW_LINE> <DEDENT> Evaluate.method_config = lambda: base_api.ApiMethodInfo( flat_path=u'v1p1beta1/projects/{projectsId}/locations/{locationsId}/models/{modelsId}:evaluate', http_method=u'POST', method_id=u'speech.projects.locations.models.evaluate', ordered_params=[u'name'], path_params=[u'name'], query_params=[], relative_path=u'v1p1beta1/{+name}:evaluate', request_field=u'evaluateModelRequest', request_type_name=u'SpeechProjectsLocationsModelsEvaluateRequest', response_type_name=u'Operation', supports_download=False, ) <NEW_LINE> def List(self, request, global_params=None): <NEW_LINE> <INDENT> config = self.GetMethodConfig('List') <NEW_LINE> return self._RunMethod( config, request, global_params=global_params) <NEW_LINE> <DEDENT> List.method_config = lambda: base_api.ApiMethodInfo( flat_path=u'v1p1beta1/projects/{projectsId}/locations/{locationsId}/models', http_method=u'GET', method_id=u'speech.projects.locations.models.list', ordered_params=[u'parent'], path_params=[u'parent'], query_params=[u'filter', u'pageSize', u'pageToken'], relative_path=u'v1p1beta1/{+parent}/models', request_field='', request_type_name=u'SpeechProjectsLocationsModelsListRequest', response_type_name=u'ListModelsResponse', supports_download=False, ) | Service class for the projects_locations_models resource. | 6259905710dbd63aa1c72134 |
class MongoAdvancedSearchFilter(BaseFilterBackend): <NEW_LINE> <INDENT> def filter_queryset(self, request, queryset, view): <NEW_LINE> <INDENT> as_kwargs = get_as_kwargs(request) <NEW_LINE> if as_kwargs: <NEW_LINE> <INDENT> queryset = queryset.filter(**as_kwargs) <NEW_LINE> <DEDENT> return queryset | Mongo advanced search filter | 62599057baa26c4b54d5081b
class Category(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=100) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name | Category - kind
Django requires that models inherit from models.Model | 625990577d847024c075d951