code (string, 4–4.48k chars) | docstring (string, 1–6.45k chars) | _id (string, 24 chars)
---|---|---|
class LearningAgent(Agent): <NEW_LINE> <INDENT> def __init__(self, env, learning=False, epsilon=1.0, alpha=0.5): <NEW_LINE> <INDENT> super(LearningAgent, self).__init__(env) <NEW_LINE> self.planner = RoutePlanner(self.env, self) <NEW_LINE> self.valid_actions = self.env.valid_actions <NEW_LINE> self.learning = learning <NEW_LINE> self.Q = dict() <NEW_LINE> self.epsilon = epsilon <NEW_LINE> self.alpha = alpha <NEW_LINE> self.t = 0.0 <NEW_LINE> <DEDENT> def reset(self, destination=None, testing=False): <NEW_LINE> <INDENT> self.planner.route_to(destination) <NEW_LINE> if testing is True: <NEW_LINE> <INDENT> self.epsilon = 0.0 <NEW_LINE> self.alpha = 0.0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.t += 1.0 <NEW_LINE> self.epsilon = math.fabs(math.cos(self.alpha*self.t)) <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def build_state(self): <NEW_LINE> <INDENT> waypoint = self.planner.next_waypoint() <NEW_LINE> inputs = self.env.sense(self) <NEW_LINE> deadline = self.env.get_deadline(self) <NEW_LINE> state = (waypoint, inputs['light'], inputs['left'], inputs['right'], inputs['oncoming']) <NEW_LINE> return state <NEW_LINE> <DEDENT> def get_maxQ(self, state): <NEW_LINE> <INDENT> maxQ = max(self.Q[state].values()) <NEW_LINE> actions_maxQ_values = [x for x in self.Q.get(state) if self.Q.get(state)[x] is maxQ] <NEW_LINE> return maxQ,actions_maxQ_values <NEW_LINE> <DEDENT> def createQ(self, state): <NEW_LINE> <INDENT> if self.learning is True: <NEW_LINE> <INDENT> if self.Q.get(state) is None: <NEW_LINE> <INDENT> self.Q[state] = {} <NEW_LINE> for i in self.valid_actions: <NEW_LINE> <INDENT> self.Q.get(state)[i] = 0.0 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return <NEW_LINE> <DEDENT> def choose_action(self, state): <NEW_LINE> <INDENT> self.state = state <NEW_LINE> self.next_waypoint = self.planner.next_waypoint() <NEW_LINE> epsilon_action = random.random() <= self.epsilon <NEW_LINE> if self.learning is False or epsilon_action is True: <NEW_LINE> <INDENT> action = random.choice(self.valid_actions) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> maxQ,maxQ_action = self.get_maxQ(state) <NEW_LINE> action = random.choice(maxQ_action) <NEW_LINE> <DEDENT> return action <NEW_LINE> <DEDENT> def learn(self, state, action, reward): <NEW_LINE> <INDENT> if self.learning: <NEW_LINE> <INDENT> self.Q.get(state)[action] = self.Q.get(state)[action] + self.alpha * (reward -self.Q.get(state)[action]) <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> state = self.build_state() <NEW_LINE> self.createQ(state) <NEW_LINE> action = self.choose_action(state) <NEW_LINE> reward = self.env.act(self, action) <NEW_LINE> self.learn(state, action, reward) <NEW_LINE> return | An agent that learns to drive in the Smartcab world.
This is the object you will be modifying. | 6259905dbaa26c4b54d508f1 |
class TestValidateEOLDate(BasePyTestCase): <NEW_LINE> <INDENT> def test_none(self): <NEW_LINE> <INDENT> request = mock.Mock() <NEW_LINE> request.errors = Errors() <NEW_LINE> request.validated = {'eol': None} <NEW_LINE> validators.validate_eol_date(request) <NEW_LINE> assert not len(request.errors) <NEW_LINE> <DEDENT> def test_out_of_regex(self): <NEW_LINE> <INDENT> request = mock.Mock() <NEW_LINE> request.errors = Errors() <NEW_LINE> request.validated = { 'eol': date(3120, 11, 5)} <NEW_LINE> validators.validate_eol_date(request) <NEW_LINE> assert request.errors == [ {'location': 'body', 'name': 'eol', 'description': 'End-of-life date may not be in the right range of years (2000-2100)'} ] <NEW_LINE> assert request.errors.status == exceptions.HTTPBadRequest.code <NEW_LINE> <DEDENT> def test_correct_date(self): <NEW_LINE> <INDENT> request = mock.Mock() <NEW_LINE> request.errors = Errors() <NEW_LINE> request.validated = {'eol': date(2022, 11, 5)} <NEW_LINE> validators.validate_eol_date(request) <NEW_LINE> assert not len(request.errors) | Test the validate_eol_date() function. | 6259905d4428ac0f6e659b88 |
class NonSuicidePlayer(RandomCapturePlayer): <NEW_LINE> <INDENT> def getAction(self): <NEW_LINE> <INDENT> p = self.game.getAcceptable(self.color) <NEW_LINE> if len(p) > 0: <NEW_LINE> <INDENT> return [self.color, choice(p)] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return RandomCapturePlayer.getAction(self) | do random non-suicide moves in the capture game | 6259905d56b00c62f0fb3f16 |
class Screen(object): <NEW_LINE> <INDENT> pygame.init() <NEW_LINE> _screen = pygame.display.set_mode((800, 600)) <NEW_LINE> _camera = Point(0, 0) <NEW_LINE> center = Point(800 / 2, 600 / 2) <NEW_LINE> def clear(self): <NEW_LINE> <INDENT> self._screen.fill(COLOR_BLACK) <NEW_LINE> <DEDENT> def blit(self, renderable, point): <NEW_LINE> <INDENT> self._screen.blit(renderable, point) <NEW_LINE> <DEDENT> @property <NEW_LINE> def camera(self): <NEW_LINE> <INDENT> return self._camera <NEW_LINE> <DEDENT> @camera.setter <NEW_LINE> def camera(self, point): <NEW_LINE> <INDENT> self._camera.x, self._camera.y = point.x, point.y | The global screen object. | 6259905dd486a94d0ba2d613 |
class HeaderSection: <NEW_LINE> <INDENT> REQUIRED_HEADER_ENTITIES = ('FILE_DESCRIPTION', 'FILE_NAME', 'FILE_SCHEMA') <NEW_LINE> OPTIONAL_HEADER_ENTITIES = ('FILE_POPULATION', 'SECTION_LANGUAGE', 'SECTION_CONTENT') <NEW_LINE> KNOWN_HEADER_ENTITIES = set(REQUIRED_HEADER_ENTITIES) | set(OPTIONAL_HEADER_ENTITIES) <NEW_LINE> def __init__(self, entities: Dict = None): <NEW_LINE> <INDENT> self.entities: Dict[str: Entity] = entities or OrderedDict() <NEW_LINE> <DEDENT> def add(self, entity: Entity) -> None: <NEW_LINE> <INDENT> self.entities[entity.name] = entity <NEW_LINE> <DEDENT> def __getitem__(self, name: str) -> Entity: <NEW_LINE> <INDENT> return self.entities[name] <NEW_LINE> <DEDENT> def __contains__(self, name: str) -> bool: <NEW_LINE> <INDENT> return name in self.entities <NEW_LINE> <DEDENT> def get(self, name: str) -> Optional[Entity]: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.entities[name] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def set_file_description(self, description: Tuple = None, level: str = '2;1') -> None: <NEW_LINE> <INDENT> description = ParameterList(description) if description else ParameterList() <NEW_LINE> self.add(Entity('FILE_DESCRIPTION', ParameterList(( ParameterList(description), str(level) )))) <NEW_LINE> <DEDENT> def set_file_name(self, name: str, time_stamp: str = None, author: str = '', organization: Tuple = None, preprocessor_version: Tuple = None, organization_system: str = '', autorization: str = '', ) -> None: <NEW_LINE> <INDENT> if time_stamp is None: <NEW_LINE> <INDENT> time_stamp = datetime.utcnow().isoformat(timespec='seconds') <NEW_LINE> <DEDENT> organization = ParameterList(organization) if organization else ParameterList(('',)) <NEW_LINE> preprocessor_version = ParameterList(preprocessor_version) if preprocessor_version else ParameterList(('',)) <NEW_LINE> self.add(Entity('FILE_NAME', ParameterList(( str(name), time_stamp, author, organization, preprocessor_version, organization_system, autorization, )))) <NEW_LINE> <DEDENT> def set_file_schema(self, schemas: Iterable) -> None: <NEW_LINE> <INDENT> schema = ParameterList(schemas) if schemas else ParameterList() <NEW_LINE> self.add(Entity('FILE_SCHEMA', ParameterList((schema,)))) <NEW_LINE> <DEDENT> def write(self, fp: TextIO) -> None: <NEW_LINE> <INDENT> def write_entities(names, optional=False): <NEW_LINE> <INDENT> for name in names: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> entity = self[name] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> if not optional: <NEW_LINE> <INDENT> raise StepFileStructureError(f'Missing required header entity: {name}') <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> fp.write(str(entity)) <NEW_LINE> fp.write(END_OF_INSTANCE) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> fp.write('HEADER' + END_OF_INSTANCE) <NEW_LINE> write_entities(names=HeaderSection.REQUIRED_HEADER_ENTITIES, optional=False) <NEW_LINE> write_entities(names=HeaderSection.OPTIONAL_HEADER_ENTITIES, optional=True) <NEW_LINE> fp.write('ENDSEC' + END_OF_INSTANCE) <NEW_LINE> unknown_header_entities = set(self.entities.keys()) - HeaderSection.KNOWN_HEADER_ENTITIES <NEW_LINE> if len(unknown_header_entities): <NEW_LINE> <INDENT> raise StepFileStructureError(f'Found unsupported header entities: {unknown_header_entities}') | The HEADER section has a fixed structure consisting of 3 to 6 groups in the given order. Except for the data fields
time_stamp and FILE_SCHEMA all fields may contain empty strings. | 6259905d2c8b7c6e89bd4e3a |
class HelloCommand(Command): <NEW_LINE> <INDENT> pass | Complete me please.
hello
{ --dangerous-option= : This $hould be `escaped`. }
{ --option-without-description } | 6259905dbe8e80087fbc06d0 |
class JsonRenderView(object): <NEW_LINE> <INDENT> def render_to_json(self, model_data): <NEW_LINE> <INDENT> data = serializers.serialize('json', model_data) <NEW_LINE> return HttpResponse(data, mimetype="application/json") | Renders a django model directly to json | 6259905d7d847024c075da1e |
@implementer(IAccountUnlocked) <NEW_LINE> class AccountUnlocked(UserActivity): <NEW_LINE> <INDENT> activity = u'unlock_account' <NEW_LINE> def __init__(self, request, user, actor=None, **activity_detail): <NEW_LINE> <INDENT> super(AccountUnlocked, self).__init__(request, user, actor, **activity_detail) | An instance of this class is emitted as an :term:`event`
whenever a user's account is unlocked. See :class:`UserActivity`. | 6259905df548e778e596cbd5 |
class MonitoringLog(object): <NEW_LINE> <INDENT> def __init__(self, logs_to_stdout): <NEW_LINE> <INDENT> self.logs_to_stdout = logs_to_stdout <NEW_LINE> if not (os.path.isdir("vypr_monitoring_logs")): <NEW_LINE> <INDENT> os.mkdir("vypr_monitoring_logs") <NEW_LINE> <DEDENT> self.log_file_name = "vypr_monitoring_logs/%s" % str(datetime.datetime.now()). replace(" ", "_").replace(":", "_").replace(".", "_").replace("-", "_") <NEW_LINE> self.handle = None <NEW_LINE> <DEDENT> def start_logging(self): <NEW_LINE> <INDENT> self.handle = open(self.log_file_name, "a") <NEW_LINE> <DEDENT> def end_logging(self): <NEW_LINE> <INDENT> self.handle.close() <NEW_LINE> <DEDENT> def log(self, message): <NEW_LINE> <INDENT> if self.handle: <NEW_LINE> <INDENT> message = "[VyPR monitoring - %s] %s" % (str(datetime.datetime.now()), message) <NEW_LINE> self.handle.write("%s\n" % message) <NEW_LINE> self.handle.flush() <NEW_LINE> if self.logs_to_stdout: <NEW_LINE> <INDENT> print(message) | Class to handle monitoring logging. | 6259905dadb09d7d5dc0bbb6 |
class GraphQLApplication(object): <NEW_LINE> <INDENT> def __init__( self, schema, execute_options={}, format_error=graphql_server.default_format_error, encode=graphql_server.json_encode ): <NEW_LINE> <INDENT> self.schema = schema <NEW_LINE> self.execute_options = execute_options <NEW_LINE> self.format_error = format_error <NEW_LINE> self.encode = encode <NEW_LINE> <DEDENT> def __call__(self, environ, start_response): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> headers = [('Content-type', 'application/json; charset=utf-8')] <NEW_LINE> request_method = environ['REQUEST_METHOD'].lower() <NEW_LINE> data = _parse_body(environ) <NEW_LINE> query_data = dict(parse_qsl(environ.get('QUERY_STRING', ''))) <NEW_LINE> execute_options = { k: v(environ) if callable(v) else v for k, v in self.execute_options.items() } <NEW_LINE> execution_results, all_params = graphql_server.run_http_query( self.schema, request_method, data, query_data=query_data, **execute_options ) <NEW_LINE> body, status_code = graphql_server.encode_execution_results( execution_results, format_error=self.format_error, is_batch=isinstance(data, list), encode=self.encode ) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print('Error {}'.format(e)) <NEW_LINE> headers = [('Content-type', 'application/json; charset=utf-8')] <NEW_LINE> header_dict = getattr(e, 'headers', None) or {} <NEW_LINE> headers += list(header_dict.items()) <NEW_LINE> status_code = getattr(e, 'status_code', 500) <NEW_LINE> errors = [self.format_error(e)] <NEW_LINE> body = self.encode({'errors': errors}) <NEW_LINE> <DEDENT> start_response(_status(status_code), headers) <NEW_LINE> return [body.encode('utf8')] | WSGI GraphQL Application. | 6259905d435de62698e9d451 |
class War: <NEW_LINE> <INDENT> def __init__(self, enemy_modifier, population, resentment): <NEW_LINE> <INDENT> self.casualties = 0 <NEW_LINE> self.annexed = 0 <NEW_LINE> self.won = False <NEW_LINE> self.mercenary_pay = 0 <NEW_LINE> self.looting_victims = 0 <NEW_LINE> self.captured_grain = 0 <NEW_LINE> self.ceasefire = False <NEW_LINE> self.population = population <NEW_LINE> self.resentment = 0 <NEW_LINE> mood = 1.2 - (resentment / 16.0) <NEW_LINE> self.away = enemy_modifier * 18 + 85 <NEW_LINE> self.home = round(population * mood) + 13 <NEW_LINE> <DEDENT> def first_strike(self, desperation, roll): <NEW_LINE> <INDENT> self.ceasefire = self.home > self.away <NEW_LINE> if self.ceasefire: <NEW_LINE> <INDENT> self.casualties = 1 + desperation <NEW_LINE> self.resentment = 2 * self.casualties <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.casualties = 2 + desperation + roll <NEW_LINE> self.away += (3 * self.casualties) <NEW_LINE> <DEDENT> <DEDENT> def campaign(self, mercs, grain): <NEW_LINE> <INDENT> self.home += (mercs * 7) <NEW_LINE> self.away = round(self.away * 1.95) <NEW_LINE> casualties = round((self.away - (mercs * 4) - round(self.home * 0.25)) / 10) <NEW_LINE> self.casualties += min(self.population-self.casualties, max(0, casualties)) <NEW_LINE> self.annexed = round((self.home - self.away) * 0.8) <NEW_LINE> self.won = self.home > self.away <NEW_LINE> if self.won: <NEW_LINE> <INDENT> self.landslide = self.annexed > 399 <NEW_LINE> if self.landslide: <NEW_LINE> <INDENT> self.casualties = -47 <NEW_LINE> self.captured_grain = 3513 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.captured_grain = round(self.annexed * 1.7) <NEW_LINE> <DEDENT> grain += self.captured_grain <NEW_LINE> <DEDENT> pay = mercs * 40 <NEW_LINE> if pay > grain: <NEW_LINE> <INDENT> self.mercenary_pay = grain <NEW_LINE> looted = round((pay - grain) / 7) + 1 <NEW_LINE> self.looting_victims = min(self.population-self.casualties, looted) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.mercenary_pay = pay <NEW_LINE> <DEDENT> self.resentment = 2 * self.casualties + 3 * self.looting_victims <NEW_LINE> return self.won | Calculate the outcome, casualties and resentment of war with a neighbouring duchy.
- enemy_modifier: a random integer in the range [1, 9], is a proxy for enemy strength / size.
- population: The number of peasants in your duchy.
- resentment: an integer that gives the level of resentment against you by your peasants. | 6259905d4e4d562566373a53 |
class CrawlResult(collections.Sequence): <NEW_LINE> <INDENT> url = None <NEW_LINE> feed = None <NEW_LINE> hints = None <NEW_LINE> icon_url = None <NEW_LINE> def __init__(self, url, feed, hints, icon_url=None): <NEW_LINE> <INDENT> self.url = url <NEW_LINE> self.feed = feed <NEW_LINE> self.hints = hints <NEW_LINE> self.icon_url = icon_url <NEW_LINE> <DEDENT> def add_as_subscription(self, subscription_set): <NEW_LINE> <INDENT> if not isinstance(subscription_set, SubscriptionSet): <NEW_LINE> <INDENT> raise TypeError( 'expected an instance of {0.__module__}.{0.__name__}, ' 'not {1!r}'.format(SubscriptionSet, subscription_set) ) <NEW_LINE> <DEDENT> return subscription_set.subscribe(self.feed, icon_uri=self.icon_url) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return 3 <NEW_LINE> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> if index == 0 or index == -3: <NEW_LINE> <INDENT> return self.url <NEW_LINE> <DEDENT> elif index == 1 or index == -2: <NEW_LINE> <INDENT> return self.feed <NEW_LINE> <DEDENT> elif index == 2 or index == -1: <NEW_LINE> <INDENT> return self.hints <NEW_LINE> <DEDENT> raise IndexError('index out of range') | The result of each crawl of a feed.
It mimics triple of (:attr:`url`, :attr:`feed`, :attr:`hints`) for
backward compatibility to below 0.3.0, so you can still take these
values using tuple unpacking, though it's not recommended way to
get these values anymore.
.. versionadded:: 0.3.0 | 6259905d1f5feb6acb164236 |
class ValidationError(object): <NEW_LINE> <INDENT> def __init__(self, code, info = ""): <NEW_LINE> <INDENT> self._code = code <NEW_LINE> self._info = info <NEW_LINE> <DEDENT> NO_ERROR = 0 <NEW_LINE> INVALID_SIGNATURE = 1 <NEW_LINE> NO_SIGNATURE = 2 <NEW_LINE> CANNOT_RETRIEVE_CERTIFICATE = 3 <NEW_LINE> EXPIRED_CERTIFICATE = 4 <NEW_LINE> LOOP_DETECTED = 5 <NEW_LINE> MALFORMED_CERTIFICATE = 6 <NEW_LINE> EXCEEDED_DEPTH_LIMIT = 7 <NEW_LINE> INVALID_KEY_LOCATOR = 8 <NEW_LINE> POLICY_ERROR = 9 <NEW_LINE> IMPLEMENTATION_ERROR = 255 <NEW_LINE> USER_MIN = 256 <NEW_LINE> def getCode(self): <NEW_LINE> <INDENT> return self._code <NEW_LINE> <DEDENT> def getInfo(self): <NEW_LINE> <INDENT> return self._info <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> if self._code == ValidationError.NO_ERROR: <NEW_LINE> <INDENT> result = "No error" <NEW_LINE> <DEDENT> elif self._code == ValidationError.INVALID_SIGNATURE: <NEW_LINE> <INDENT> result = "Invalid signature" <NEW_LINE> <DEDENT> elif self._code == ValidationError.NO_SIGNATURE: <NEW_LINE> <INDENT> result = "Missing signature" <NEW_LINE> <DEDENT> elif self._code == ValidationError.CANNOT_RETRIEVE_CERTIFICATE: <NEW_LINE> <INDENT> result = "Cannot retrieve certificate" <NEW_LINE> <DEDENT> elif self._code == ValidationError.EXPIRED_CERTIFICATE: <NEW_LINE> <INDENT> result = "Certificate expired" <NEW_LINE> <DEDENT> elif self._code == ValidationError.LOOP_DETECTED: <NEW_LINE> <INDENT> result = "Loop detected in certification chain" <NEW_LINE> <DEDENT> elif self._code == ValidationError.MALFORMED_CERTIFICATE: <NEW_LINE> <INDENT> result = "Malformed certificate" <NEW_LINE> <DEDENT> elif self._code == ValidationError.EXCEEDED_DEPTH_LIMIT: <NEW_LINE> <INDENT> result = "Exceeded validation depth limit" <NEW_LINE> <DEDENT> elif self._code == ValidationError.INVALID_KEY_LOCATOR: <NEW_LINE> <INDENT> result = "Key locator violates validation policy" <NEW_LINE> <DEDENT> elif self._code == ValidationError.POLICY_ERROR: <NEW_LINE> <INDENT> result = "Validation policy error" <NEW_LINE> <DEDENT> elif self._code == ValidationError.IMPLEMENTATION_ERROR: <NEW_LINE> <INDENT> result = "Internal implementation error" <NEW_LINE> <DEDENT> elif self._code >= ValidationError.USER_MIN: <NEW_LINE> <INDENT> result = "Custom error code " + str(self._code) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result = "Unrecognized error code " + str(self._code) <NEW_LINE> <DEDENT> if len(self._info) > 0: <NEW_LINE> <INDENT> result += " (" + self._info + ")" <NEW_LINE> <DEDENT> return result | Create a new ValidationError for the given code.
:param int code: The code which is one of the standard error codes such as
ValidationError.INVALID_SIGNATURE, or a custom code if greater than or
equal to ValidationError.USER_MIN .
:param str info: (optional) The error message. If omitted, use an empty
string. | 6259905d8e71fb1e983bd117 |
class ConcatPool2d(nn.Module): <NEW_LINE> <INDENT> def __init__(self, kernel_sz=None): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> kernel_sz = kernel_sz or 1 <NEW_LINE> self.ap,self.mp = nn.AvgPool2d(kernel_sz), nn.MaxPool2d(kernel_sz) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> return torch.cat([self.mp(x), self.ap(x)], 1) | Layer that concats `AvgPool2d` and `MaxPool2d`. | 6259905d32920d7e50bc7693 |
class Featured(Base): <NEW_LINE> <INDENT> __tablename__ = 'featured' <NEW_LINE> id = Column(Integer, primary_key = True) <NEW_LINE> title = Column(String(80), nullable = False) <NEW_LINE> artist = Column(String(80), nullable = False) <NEW_LINE> genre = Column(String(80)) <NEW_LINE> youtube = Column(String(250)) <NEW_LINE> rendition = Column(String(80)) <NEW_LINE> time_created = Column(DateTime(timezone=True), server_default=func.now()) <NEW_LINE> time_updated = Column(DateTime(timezone=True), onupdate=func.now()) <NEW_LINE> @property <NEW_LINE> def serialize(self): <NEW_LINE> <INDENT> return { 'id': self.id, 'title': self.title, 'artist': self.artist, 'genre': self.genre, 'youtube': "https://www.youtube.com/watch?v=%s" % self.youtube, 'rendition': self.rendition, 'time_created': self.time_created.strftime("%B %d, %Y") } | This class is for songs in the special playlist, Featured.
Attributes:
id (int): Song id, primary key.
title (str): Title of song.
artist (str): Artist of song.
genre (str): Musical genre of song.
youtube (str): Youtube video id.
rendition (str): If the song is a cover or a rendition of an older song.
time_created (datetime): Unix timestamp of when playlist was created.
time_updated (datetime): Unix timestamp of when playlist was updated. | 6259905d0c0af96317c57885 |
class AccountInfoExistsError(CAError): <NEW_LINE> <INDENT> pass | Raised when the account file already exists. | 6259905d7d847024c075da1f |
class LoginTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.browser = Browser() <NEW_LINE> self.browser.visit("http://diabcontrol1.herokuapp.com") <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> self.browser.quit() <NEW_LINE> <DEDENT> def _login(self, username, password): <NEW_LINE> <INDENT> self.browser.fill('username', username) <NEW_LINE> self.browser.fill('password', password + Keys.RETURN) <NEW_LINE> <DEDENT> def test_login_success(self): <NEW_LINE> <INDENT> self._login('admin', 'admin123') <NEW_LINE> page_header = self.browser.find_by_css('.container-fluid .page-header') <NEW_LINE> self.assertIn('Welcome to DiabControl system', page_header.text) <NEW_LINE> <DEDENT> def test_login_failed(self): <NEW_LINE> <INDENT> self._login('admin', 'INVALIDpassword') <NEW_LINE> alert = self.browser.find_by_css('.container-fluid .form .alert') <NEW_LINE> expected_warning = ( 'Please enter a correct username and password. ' 'Note that both fields may be case-sensitive.' ) <NEW_LINE> self.assertIn(expected_warning, alert.text) | Proposed solution for task 3.2
Splinter
Firefox | 6259905d435de62698e9d452 |
class VerifyShortAddress(ShortAddrSpecialCommand): <NEW_LINE> <INDENT> _cmdval=0xb9 <NEW_LINE> _isquery=True <NEW_LINE> _response=YesNoResponse | The ballast shall give an answer "YES" if the received short
address is equal to its own short address. | 6259905d01c39578d7f1425d |
class CloudbaseinitAddUserdata(CloudbaseinitRecipe): <NEW_LINE> <INDENT> def prepare_cbinit_config(self, service_type): <NEW_LINE> <INDENT> super(CloudbaseinitAddUserdata, self).prepare_cbinit_config( service_type) <NEW_LINE> if self._backend.remote_client.manager.os_type != util.WINDOWS_NANO: <NEW_LINE> <INDENT> LOG.info("Injecting userdata_path option in conf file.") <NEW_LINE> self._cbinit_conf.set_conf_value( name='userdata_save_path', value=r'C:\userdatafile') | Recipe for testing that the userdata is being saved on the disk. | 6259905d3cc13d1c6d466d8e |
class ModelsEIP(object): <NEW_LINE> <INDENT> swagger_types = { 'public_ip': 'str' } <NEW_LINE> attribute_map = { 'public_ip': 'publicIP' } <NEW_LINE> def __init__(self, public_ip=None): <NEW_LINE> <INDENT> self._public_ip = None <NEW_LINE> self.discriminator = None <NEW_LINE> if public_ip is not None: <NEW_LINE> <INDENT> self.public_ip = public_ip <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def public_ip(self): <NEW_LINE> <INDENT> return self._public_ip <NEW_LINE> <DEDENT> @public_ip.setter <NEW_LINE> def public_ip(self, public_ip): <NEW_LINE> <INDENT> self._public_ip = public_ip <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> if issubclass(ModelsEIP, dict): <NEW_LINE> <INDENT> for key, value in self.items(): <NEW_LINE> <INDENT> result[key] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, ModelsEIP): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 6259905d097d151d1a2c26bb |
class _AnsiColorizer(object): <NEW_LINE> <INDENT> _colors = dict(black=30, red=31, green=32, yellow=33, blue=34, magenta=35, cyan=36, white=37) <NEW_LINE> def __init__(self, stream): <NEW_LINE> <INDENT> self.stream = stream <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def supported(cls, stream=sys.stdout): <NEW_LINE> <INDENT> if not stream.isatty(): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> import curses <NEW_LINE> <DEDENT> except ImportError: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return curses.tigetnum("colors") > 2 <NEW_LINE> <DEDENT> except curses.error: <NEW_LINE> <INDENT> curses.setupterm() <NEW_LINE> return curses.tigetnum("colors") > 2 <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> raise <NEW_LINE> return False <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def write(self, text, color): <NEW_LINE> <INDENT> color = self._colors[color] <NEW_LINE> self.stream.write('\x1b[%s;1m%s\x1b[0m' % (color, text)) | A colorizer is an object that loosely wraps around a stream, allowing
callers to write text to the stream in a particular color.
Colorizer classes must implement C{supported()} and C{write(text, color)}. | 6259905dbe8e80087fbc06d2 |
class PoLintTool(Tool): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def get_default_config(cls): <NEW_LINE> <INDENT> config = Tool.get_default_config() <NEW_LINE> config['filters'] = [ r'\.pot?$', ] <NEW_LINE> config['options'] = { 'variable-formats': list(get_available_formats()), } <NEW_LINE> return config <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_all_codes(cls): <NEW_LINE> <INDENT> codes = [] <NEW_LINE> for rule in get_linter_rules().values(): <NEW_LINE> <INDENT> codes.append(( rule.num, rule.desc, )) <NEW_LINE> <DEDENT> for rule in get_template_linter_rules().values(): <NEW_LINE> <INDENT> codes.append(( rule.num, rule.desc, )) <NEW_LINE> <DEDENT> return codes <NEW_LINE> <DEDENT> def execute(self, finder): <NEW_LINE> <INDENT> issues = [] <NEW_LINE> rules = [ rule for rule, _ in self.get_all_codes() if rule not in self.config['disabled'] ] <NEW_LINE> linter = Linter(self.config['options']['variable-formats'], rules) <NEW_LINE> tmpl_linter = TemplateLinter( self.config['options']['variable-formats'], rules, ) <NEW_LINE> for filepath in finder.files(self.config['filters']): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> file_content = finder.read_file(filepath) <NEW_LINE> <DEDENT> except Exception as exc: <NEW_LINE> <INDENT> issues.append(self.make_issue(exc, filepath)) <NEW_LINE> continue <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> if filepath.endswith('.po'): <NEW_LINE> <INDENT> errors = linter.verify_file(file_content) <NEW_LINE> <DEDENT> elif filepath.endswith('.pot'): <NEW_LINE> <INDENT> errors = tmpl_linter.verify_file(file_content) <NEW_LINE> <DEDENT> <DEDENT> except IOError as exc: <NEW_LINE> <INDENT> issues.append(ParseIssue(exc, filepath)) <NEW_LINE> continue <NEW_LINE> <DEDENT> except Exception as exc: <NEW_LINE> <INDENT> issues.append(self.make_issue(exc, filepath)) <NEW_LINE> continue <NEW_LINE> <DEDENT> issues += [ self.make_issue(error, filepath) for error in errors ] <NEW_LINE> <DEDENT> return issues <NEW_LINE> <DEDENT> def make_issue(self, error, filename): <NEW_LINE> <INDENT> if isinstance(error, LintMessage): <NEW_LINE> <INDENT> issue = PoLintIssue( error.code, error.msg, filename, error.line + 1, ) <NEW_LINE> if error.kind == 'err': <NEW_LINE> <INDENT> issue.pylint_type = 'E' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> issue.pylint_type = 'W' <NEW_LINE> <DEDENT> return issue <NEW_LINE> <DEDENT> if isinstance(error, EnvironmentError): <NEW_LINE> <INDENT> return AccessIssue(error, filename) <NEW_LINE> <DEDENT> return UnknownIssue(error, filename) | A part of the dennis package, this tool lints PO and POT files for
problems. | 6259905d16aa5153ce401b30 |
class PathCompositionInterface(object): <NEW_LINE> <INDENT> __metaclass__ = abc.ABCMeta <NEW_LINE> def __init__(self, strict=False): <NEW_LINE> <INDENT> self._path = Path(force_absolute=self._force_absolute, strict=strict) <NEW_LINE> <DEDENT> @property <NEW_LINE> def path(self): <NEW_LINE> <INDENT> return self._path <NEW_LINE> <DEDENT> @property <NEW_LINE> def pathstr(self): <NEW_LINE> <INDENT> s = ('furl.pathstr is deprecated. Use str(furl.path) instead. There ' 'should be one, and preferably only one, obvious way to serialize' ' a Path object to a string.') <NEW_LINE> warnings.warn(s, DeprecationWarning) <NEW_LINE> return str(self._path) <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def _force_absolute(self, path): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __setattr__(self, attr, value): <NEW_LINE> <INDENT> if attr == '_path': <NEW_LINE> <INDENT> self.__dict__[attr] = value <NEW_LINE> return True <NEW_LINE> <DEDENT> elif attr == 'path': <NEW_LINE> <INDENT> self._path.load(value) <NEW_LINE> return True <NEW_LINE> <DEDENT> return False | Abstract class interface for a parent class that contains a Path. | 6259905d4f6381625f199fc9 |
class SlowMotion(PowerupEffect): <NEW_LINE> <INDENT> runtime = 140.0 <NEW_LINE> symbol = 4 <NEW_LINE> def start(self): <NEW_LINE> <INDENT> self.player = self.state.player <NEW_LINE> snd.play('gameover') <NEW_LINE> game.speedmult += 2 <NEW_LINE> self.ending = 0 <NEW_LINE> self.player.bullet = 1 <NEW_LINE> <DEDENT> def tick(self, speedadjust): <NEW_LINE> <INDENT> PowerupEffect.tick(self, speedadjust) <NEW_LINE> if not self.ending and self.time >= 120.0: <NEW_LINE> <INDENT> self.ending = 1 <NEW_LINE> game.speedmult -= 1 <NEW_LINE> <DEDENT> if self.time <= 100.0: <NEW_LINE> <INDENT> self.player.bullet = (int(self.time * 0.8) % 4) + 1 <NEW_LINE> <DEDENT> <DEDENT> def end(self): <NEW_LINE> <INDENT> self.player.bullet = 0 <NEW_LINE> if self.ending: <NEW_LINE> <INDENT> game.speedmult -= 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> game.speedmult -= 2 | Bullet Time | 6259905d097d151d1a2c26bc |
class Chat(QMainWindow, main_class): <NEW_LINE> <INDENT> def __init__(self, espera_mensaje_local, parent=None): <NEW_LINE> <INDENT> QMainWindow.__init__(self, parent) <NEW_LINE> self.setupUi(self) <NEW_LINE> <DEDENT> def getTexto(self): <NEW_LINE> <INDENT> msj = str(self.text_send.toPlainText()) <NEW_LINE> self.text_send.clear() <NEW_LINE> return msj <NEW_LINE> <DEDENT> def setTexto(self, mensaje): <NEW_LINE> <INDENT> self.text_receive.insertPlainText(mensaje) | **************************************************
Class that starts the chat window.
************************************************** | 6259905dd53ae8145f919aaf |
class NewEdXPageExtractor(CurrentEdXPageExtractor): <NEW_LINE> <INDENT> def extract_sections_from_html(self, page, BASE_URL): <NEW_LINE> <INDENT> def _make_url(section_soup): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return section_soup.a['href'] <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def _get_section_name(section_soup): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return section_soup.button.h3.string.strip() <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def _make_subsections(section_soup): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> subsections_soup = section_soup.find_all('li', class_=['subsection']) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> subsections = [SubSection(position=i, url=s.a['href'], name=s.a.h4.string.strip()) for i, s in enumerate(subsections_soup, 1)] <NEW_LINE> return subsections <NEW_LINE> <DEDENT> soup = BeautifulSoup(page) <NEW_LINE> sections_soup = soup.find_all('li', class_=['outline-item','section']) <NEW_LINE> sections = [Section(position=i, name=_get_section_name(section_soup), url=_make_url(section_soup), subsections=_make_subsections(section_soup)) for i, section_soup in enumerate(sections_soup, 1)] <NEW_LINE> sections = [section for section in sections if section.name] <NEW_LINE> return sections | A new page extractor for the latest changes in layout of edx | 6259905da219f33f346c7e54 |
class PolicyStatus(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): <NEW_LINE> <INDENT> ENABLED = "enabled" <NEW_LINE> DISABLED = "disabled" | The value that indicates whether the policy is enabled or not. | 6259905d9c8ee82313040cb1
class GPKernelHandle: <NEW_LINE> <INDENT> ARD_KERNELS = ["RBF", "Matern"] <NEW_LINE> def __init__( self, kernels: List[str] = None, kernel_kwargs: List[dict] = None, ard: bool = True, ): <NEW_LINE> <INDENT> if kernels is None: <NEW_LINE> <INDENT> kernels = ["RBF", "WhiteKernel"] <NEW_LINE> <DEDENT> self.kernels: List[str] = kernels <NEW_LINE> if kernel_kwargs is None: <NEW_LINE> <INDENT> kernel_kwargs = [{} for _ in self.kernels] <NEW_LINE> <DEDENT> self.kernel_kwargs = kernel_kwargs <NEW_LINE> self.ard: bool = ard <NEW_LINE> <DEDENT> def __call__(self, n_in: int) -> "skl_gp.kernels.Kernel": <NEW_LINE> <INDENT> kernels = [ getattr(skl_gp.kernels, kernel)( length_scale=np.ones(n_in), **kernel_kwargs ) if self.ard and kernel in GPKernelHandle.ARD_KERNELS else getattr(skl_gp.kernels, kernel)(**kernel_kwargs) for kernel, kernel_kwargs in zip(self.kernels, self.kernel_kwargs) ] <NEW_LINE> return sum(kernels) | Convenience class for Gaussian process kernel construction.
Allows to create kernels depending on problem dimensions. | 6259905d32920d7e50bc7694 |
class TestDocsBaseModel(unittest.TestCase): <NEW_LINE> <INDENT> def test_module(self): <NEW_LINE> <INDENT> self.assertTrue(len(file_storage.__doc__) > 0) <NEW_LINE> <DEDENT> def test_class(self): <NEW_LINE> <INDENT> self.assertTrue(len(FileStorage.__doc__) > 0) <NEW_LINE> <DEDENT> def test_method(self): <NEW_LINE> <INDENT> for func in dir(FileStorage): <NEW_LINE> <INDENT> self.assertTrue(len(func.__doc__) > 0) <NEW_LINE> <DEDENT> <DEDENT> def test_permissions(self): <NEW_LINE> <INDENT> read = os.access('models/engine/file_storage.py', os.R_OK) <NEW_LINE> self.assertTrue(read) <NEW_LINE> write = os.access('models/engine/file_storage.py', os.W_OK) <NEW_LINE> self.assertTrue(write) <NEW_LINE> exe = os.access('models/engine/file_storage.py', os.X_OK) <NEW_LINE> self.assertTrue(exe) <NEW_LINE> <DEDENT> def test_instance(self): <NEW_LINE> <INDENT> obj = FileStorage() <NEW_LINE> self.assertIsInstance(obj, FileStorage) | test docstrings for base and test_base files | 6259905d379a373c97d9a673 |
class Marc698Field(EmbeddedDocument): <NEW_LINE> <INDENT> a = StringField(max_length=2000) | 698a = Discipline of the journal | 6259905d3eb6a72ae038bcae
class Settings: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.screen_width = 1200 <NEW_LINE> self.screen_height = 800 <NEW_LINE> self.bg_color = (230, 230, 230) <NEW_LINE> self.ship_speed = 1.5 <NEW_LINE> self.ship_limit = 3 <NEW_LINE> self.bullet_speed = 1.5 <NEW_LINE> self.bullet_width = 3 <NEW_LINE> self.bullet_height = 15 <NEW_LINE> self.bullet_color = (60, 60, 60) <NEW_LINE> self.bullets_allowed = 3 <NEW_LINE> self.alien_speed = 1.0 <NEW_LINE> self.fleet_drop_speed = 10 <NEW_LINE> self.fleet_direction = 1 <NEW_LINE> self.alien_points = 15 | A class to store all settings for Alien Invasion. | 6259905d1f5feb6acb164238 |
class Student: <NEW_LINE> <INDENT> def __init__(self, first_name, last_name, age): <NEW_LINE> <INDENT> self.first_name = first_name <NEW_LINE> self.last_name = last_name <NEW_LINE> self.age = age <NEW_LINE> <DEDENT> def to_json(self): <NEW_LINE> <INDENT> return self.__dict__ | class that defines student | 6259905d97e22403b383c55b |
class SpointCalculator(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.func = None <NEW_LINE> self.binning = None <NEW_LINE> self.binning_mode = "integrate" <NEW_LINE> self.normalize = False <NEW_LINE> self.kwargs = {} <NEW_LINE> <DEDENT> def _prepare_spoint(self, spoint): <NEW_LINE> <INDENT> return spoint <NEW_LINE> <DEDENT> def calc(self, spoint) -> np.array: <NEW_LINE> <INDENT> spoint = self._prepare_spoint(spoint) <NEW_LINE> if self.binning is not None: <NEW_LINE> <INDENT> if self.binning_mode == "integrate": <NEW_LINE> <INDENT> return clusterking.maths.binning.bin_function( functools.partial(self.func, spoint, **self.kwargs), self.binning, normalize=self.normalize, ) <NEW_LINE> <DEDENT> elif self.binning_mode == "sample": <NEW_LINE> <INDENT> func = functools.partial(self.func, spoint, **self.kwargs) <NEW_LINE> res = np.array(list(map(func, self.binning))) <NEW_LINE> if self.normalize: <NEW_LINE> <INDENT> res /= sum(res) <NEW_LINE> <DEDENT> print("results", res) <NEW_LINE> return res <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return self.func(spoint, **self.kwargs) | A class that holds the function with which we calculate each
point in sample space. Note that this has to be a separate class from
Scanner to avoid problems related to multiprocessing's use of the pickle
library, which are described here:
https://stackoverflow.com/questions/1412787/ | 6259905d0c0af96317c57886 |
class State(Base): <NEW_LINE> <INDENT> __tablename__ = 'states' <NEW_LINE> id = Column(Integer, autoincrement=True, primary_key=True, nullable=False) <NEW_LINE> name = Column(String(128), nullable=False) | State class definition | 6259905d24f1403a926863f5 |
class GoogleCloudVisionV1p3beta1Product(_messages.Message): <NEW_LINE> <INDENT> description = _messages.StringField(1) <NEW_LINE> displayName = _messages.StringField(2) <NEW_LINE> name = _messages.StringField(3) <NEW_LINE> productCategory = _messages.StringField(4) <NEW_LINE> productLabels = _messages.MessageField('GoogleCloudVisionV1p3beta1ProductKeyValue', 5, repeated=True) | A Product contains ReferenceImages.
Fields:
description: User-provided metadata to be stored with this product. Must
be at most 4096 characters long.
displayName: The user-provided name for this Product. Must not be empty.
Must be at most 4096 characters long.
name: The resource name of the product. Format is:
`projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`. This field
is ignored when creating a product.
productCategory: The category for the product identified by the reference
image. This should be either "homegoods" or "apparel". This field is
immutable.
productLabels: Key-value pairs that can be attached to a product. At query
time, constraints can be specified based on the product_labels. Note
that integer values can be provided as strings, e.g. "1199". Only
strings with integer values can match a range-based restriction which is
to be supported soon. Multiple values can be assigned to the same key.
One product may have up to 100 product_labels. | 6259905d1b99ca400229005e |
class Meta(models.Model): <NEW_LINE> <INDENT> meta = models.ForeignKey(Config, on_delete=models.CASCADE) <NEW_LINE> meta_data = models.CharField(default='', max_length=64) | : The class: "Meta", is part of module: "models".
Meta request data. | 6259905d63d6d428bbee3daf |
class DiscordEventarrator(Eventarrator): <NEW_LINE> <INDENT> def __init__(self, client: Client, channel: DiscordTextChannel, **options): <NEW_LINE> <INDENT> self.client = client <NEW_LINE> self.channel = channel <NEW_LINE> users = options.pop("users", None) <NEW_LINE> if users: <NEW_LINE> <INDENT> if isinstance(users, Iterable): <NEW_LINE> <INDENT> users = tuple(users) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> users = (users,) <NEW_LINE> <DEDENT> <DEDENT> self.users = users <NEW_LINE> self.message_check = options.pop("message_check", None) <NEW_LINE> self.options = options <NEW_LINE> self.sent_messages = deque(maxlen=10) <NEW_LINE> <DEDENT> async def output(self, out: str): <NEW_LINE> <INDENT> if not out: <NEW_LINE> <INDENT> log.warning("Not outputting empty message") <NEW_LINE> return <NEW_LINE> <DEDENT> msg: Message = await send_message(self.client, self.channel, out) <NEW_LINE> self.sent_messages.appendleft(msg.id) <NEW_LINE> log.debug(f"sent \"{out}\"") <NEW_LINE> <DEDENT> def input_filter(self, msg: Message) -> bool: <NEW_LINE> <INDENT> if self.channel.id != msg.channel.id: <NEW_LINE> <INDENT> log.debug(f"Ignoring {msg} (wrong channel)!") <NEW_LINE> return False <NEW_LINE> <DEDENT> if msg.id in self.sent_messages: <NEW_LINE> <INDENT> log.debug(f"Ignoring {msg} (sent by Eventory)!") <NEW_LINE> return False <NEW_LINE> <DEDENT> if self.users and msg.author not in self.users: <NEW_LINE> <INDENT> log.debug(f"Ignoring {msg} (not sent by {self.users})!") <NEW_LINE> return False <NEW_LINE> <DEDENT> if msg.author.bot: <NEW_LINE> <INDENT> log.debug(f"Ignoring {msg} (Message sent by bot)!") <NEW_LINE> return False <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> async def input_check(self, msg: Message) -> bool: <NEW_LINE> <INDENT> if self.message_check: <NEW_LINE> <INDENT> log.debug(f"Running custom message check!") <NEW_LINE> ret = self.message_check(msg) <NEW_LINE> if inspect.iscoroutine(ret): <NEW_LINE> <INDENT> ret = await ret <NEW_LINE> <DEDENT> return bool(ret) <NEW_LINE> <DEDENT> if isinstance(self.client, Bot): <NEW_LINE> <INDENT> ctx = await get_context(self.client, msg) <NEW_LINE> if ctx.command: <NEW_LINE> <INDENT> log.debug(f"Ignoring {msg} (Command detected)!") <NEW_LINE> return False <NEW_LINE> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> async def input(self) -> str: <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> msg = await wait_for_message(self.client, check=self.input_filter) <NEW_LINE> if await self.input_check(msg): <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> return msg.content | The Discord Eventarrator that does the actual work.
Args:
client: Client to use
channel: TextChannel to play the Eventory in
users (Optional[Union[User, Sequence[User]]]): The user or a list of users who play(s) the Eventory
message_check (Optional[Callable[[Message], Union[bool, Awaitable[bool]]]): A function or coroutine function which takes a message and returns
a boolean to denote whether this message is valid input or not
Attributes:
client (Client)
channel (DiscordTextChannel)
users (Optional[Set[User]]): Set of users to listen to
message_check (Optional[Callable[[Message], Union[bool, Awaitable[bool]]])
options (dict): Leftover keyword arguments passed to the constructor
sent_messages (deque): Deque containing the ids of the last 10 messages sent by the Eventarrator | 6259905d0a50d4780f7068e6 |
class Aceptacion(object): <NEW_LINE> <INDENT> def __init__(self, data): <NEW_LINE> <INDENT> self.aceptacion = data <NEW_LINE> <DEDENT> @property <NEW_LINE> def fecha_aceptacion(self): <NEW_LINE> <INDENT> fecha_aceptacion = '' <NEW_LINE> try: <NEW_LINE> <INDENT> fecha_aceptacion = self.aceptacion.FechaAceptacion.text <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return fecha_aceptacion | Class that implements the acceptance. | 6259905dd486a94d0ba2d617
class ImageFile(Plugin): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> Plugin.__init__(self) <NEW_LINE> self.help = "Realiza a aquisição de uma imagem a " + "partir de algum dispositivo, " + "seja este uma mídia ou um dispositivo de " + "aquisição de imagens (câmera, scanner)." <NEW_LINE> self.label = "Image File" <NEW_LINE> self.color = "50:100:200:150" <NEW_LINE> self.out_ports = [{"type":"harpia.extensions.c.ports.image", "name":"output_image", "label":"Output Image"}] <NEW_LINE> self.group = "Image Source" <NEW_LINE> self.properties = [{"label": "File Name", "name": "filename", "type": HARPIA_OPEN_FILE, "value":"/usr/share/harpia/images/lenna.png" } ] <NEW_LINE> self.codes[1] = 'IplImage * block$id$_img_o0 = NULL;\n' <NEW_LINE> self.codes[1] += 'block$id$_img_o0 = cvLoadImage("$prop[filename]$",-1);\n' <NEW_LINE> self.codes[4] = "cvReleaseImage(&block$id$_img_o0);\n" <NEW_LINE> self.language = "c" <NEW_LINE> self.framework = "opencv" | This class contains methods related to the ImageFile class. | 6259905d8e7ae83300eea6dd
class ShareDetailView(HTTPMethodView): <NEW_LINE> <INDENT> @share_bp.get('/<pk>', name='share_detail') <NEW_LINE> async def get(self, pk=None): <NEW_LINE> <INDENT> share = SharePoints.select().where(SharePoints.id == pk).first() <NEW_LINE> if share: <NEW_LINE> <INDENT> share.click_nums += 1 <NEW_LINE> share.save() <NEW_LINE> serializer = model_to_dict(share) <NEW_LINE> return HTTPResponse(sjson.dumps(serializer, default=str)) <NEW_LINE> <DEDENT> return json({}, status=404) <NEW_LINE> <DEDENT> async def post(self, request): <NEW_LINE> <INDENT> import pdb <NEW_LINE> pdb.set_trace() <NEW_LINE> data = request.form <NEW_LINE> url = data.get('url', '') <NEW_LINE> url_type = data.get('url_type', 1) <NEW_LINE> user_id = data.get('user_id', 0) <NEW_LINE> share = SharePoints.create( user_id=user_id, url=url, url_type=url_type, click_nums=0 ) <NEW_LINE> serializer = model_to_dict(share) <NEW_LINE> return HTTPResponse(sjson.dumps(serializer, default=str), status=201) <NEW_LINE> <DEDENT> @share_bp.delete('/<pk>', name='delete-share') <NEW_LINE> async def delete(self, pk=None): <NEW_LINE> <INDENT> share = SharePoints.delete().where(SharePoints.id == pk) <NEW_LINE> return json({'success': True}, status=204) <NEW_LINE> <DEDENT> @share_bp.get('/list', name='get_all_shares') <NEW_LINE> async def get_all_shares(self): <NEW_LINE> <INDENT> page = int(self.raw_args.get('page', 1)) <NEW_LINE> size = int(self.raw_args.get('size', PAGE_SIZE)) <NEW_LINE> shares = SharePoints.select().order_by( '-create_time' ).paginate(page, PAGE_SIZE) <NEW_LINE> serializer = [model_to_dict(share) for share in shares.iterator()] <NEW_LINE> res = { 'count': shares.count(), 'page': page, 'size': size, 'results': serializer, } <NEW_LINE> return HTTPResponse(sjson.dumps(res, default=str), status=200) | Share links. | 6259905d460517430c432b7a
class WatcherFunc(BaseWatcherDirective): <NEW_LINE> <INDENT> option_spec = {'format': rst.directives.unchanged} <NEW_LINE> has_content = True <NEW_LINE> def run(self): <NEW_LINE> <INDENT> if not self.content: <NEW_LINE> <INDENT> error = self.state_machine.reporter.error( 'The "%s" directive is empty; content required.' % self.name, nodes.literal_block(self.block_text, self.block_text), line=self.lineno) <NEW_LINE> return [error] <NEW_LINE> <DEDENT> func_path = self.content[0] <NEW_LINE> try: <NEW_LINE> <INDENT> cls_path, func_name = func_path.rsplit('.', 1) <NEW_LINE> module_name, cls_name = cls_path.rsplit('.', 1) <NEW_LINE> mod = importlib.import_module(module_name) <NEW_LINE> cls = getattr(mod, cls_name) <NEW_LINE> <DEDENT> except Exception as exc: <NEW_LINE> <INDENT> raise self.error(exc) <NEW_LINE> <DEDENT> cls_obj = cls() <NEW_LINE> func = getattr(cls_obj, func_name) <NEW_LINE> textblock = func() <NEW_LINE> if not isinstance(textblock, str): <NEW_LINE> <INDENT> textblock = str(textblock) <NEW_LINE> <DEDENT> self.add_textblock(textblock) <NEW_LINE> try: <NEW_LINE> <INDENT> node_class = getattr(nodes, self.options.get('format', 'paragraph')) <NEW_LINE> <DEDENT> except Exception as exc: <NEW_LINE> <INDENT> raise self.error(exc) <NEW_LINE> <DEDENT> node = node_class() <NEW_LINE> node.document = self.state.document <NEW_LINE> self.state.nested_parse(self.result, 0, node) <NEW_LINE> return [node] | Directive to import a value returned by a func into the Watcher doc
**How to use it**
# inside your .py file
class Bar(object):
def foo(object):
return foo_string
# Inside your .rst file
.. watcher-func:: import.path.to.your.Bar.foo node_classname
node_classname is decumented here:
http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html
This directive will then import the value and then interpret it. | 6259905dadb09d7d5dc0bbba |
class ImagePackageManifestEntry(Base): <NEW_LINE> <INDENT> __tablename__ = 'image_package_db_entries' <NEW_LINE> image_id = Column(String(image_id_length), primary_key=True) <NEW_LINE> image_user_id = Column(String(user_id_length), primary_key=True) <NEW_LINE> pkg_name = Column(String(pkg_name_length), primary_key=True) <NEW_LINE> pkg_version = Column(String(pkg_version_length), primary_key=True) <NEW_LINE> pkg_type = Column(String(pkg_type_length), primary_key=True) <NEW_LINE> pkg_arch = Column(String(16), default='N/A', primary_key=True) <NEW_LINE> file_path = Column(String(file_path_length), primary_key=True) <NEW_LINE> is_config_file = Column(Boolean, nullable=True) <NEW_LINE> digest = Column(String(digest_length)) <NEW_LINE> digest_algorithm = Column(String(8), nullable=True) <NEW_LINE> file_group_name = Column(String, nullable=True) <NEW_LINE> file_user_name = Column(String, nullable=True) <NEW_LINE> mode = Column(Integer, nullable=True) <NEW_LINE> size = Column(Integer, nullable=True) <NEW_LINE> __table_args__ = ( ForeignKeyConstraint(columns=[image_id, image_user_id, pkg_name, pkg_version, pkg_type, pkg_arch], refcolumns=['image_packages.image_id', 'image_packages.image_user_id', 'image_packages.name', 'image_packages.version', 'image_packages.pkg_type', 'image_packages.arch']), {} ) | An entry from the package manifest (e.g. rpm, deb, apk) for verifying package contents in a generic way. | 6259905da219f33f346c7e56 |
class Listing(models.Model): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = 'listing' <NEW_LINE> verbose_name_plural = 'listings' <NEW_LINE> <DEDENT> user = models.ForeignKey(User, help_text="User") <NEW_LINE> category = models.ForeignKey(Category, default=1, help_text="") <NEW_LINE> title = models.CharField(max_length=1024, null=False, blank=False, help_text="") <NEW_LINE> description = models.TextField(null=True, blank=True, help_text="") <NEW_LINE> required_skills = models.TextField(null=True, blank=True, verbose_name="Required skills", help_text="") <NEW_LINE> location = geomodels.PointField(srid=settings.SPHERICAL_MERCATOR, help_text="Location of the job") <NEW_LINE> country = models.CharField(max_length=255, null=False, blank=False, default="Germany", help_text="") <NEW_LINE> city = models.CharField(max_length=255, null=False, blank=False, default="Berlin", help_text="") <NEW_LINE> zipcode = models.CharField(max_length=20, null=False, blank=False, default=12167, verbose_name="Zip code", help_text="") <NEW_LINE> address = models.CharField(max_length=1024, null=False, blank=False, help_text="") <NEW_LINE> mobile_number = models.CharField(max_length=30, verbose_name="Mobile number", help_text="") <NEW_LINE> payment_type = models.IntegerField(null=False, blank=False, choices=PAYMENT_TYPE_CHOICES, default=PAYMENT_TYPE['HOURLY'], verbose_name="Payment type", help_text="Payment type (Fixed or hourly)") <NEW_LINE> working_hours = models.IntegerField(verbose_name="Working hours", help_text="Approximate number of hours required") <NEW_LINE> payment_rate = models.IntegerField(verbose_name="Payment rate", help_text="Payment per hour or fixed price") <NEW_LINE> objects = geomodels.GeoManager() <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.title | Listing
Represents a job offer | 6259905d1f037a2d8b9e5393 |
class PaymentInitiationPaymentCreateResponse(ModelNormal): <NEW_LINE> <INDENT> allowed_values = { } <NEW_LINE> validations = { } <NEW_LINE> @cached_property <NEW_LINE> def additional_properties_type(): <NEW_LINE> <INDENT> lazy_import() <NEW_LINE> return (bool, date, datetime, dict, float, int, list, str, none_type,) <NEW_LINE> <DEDENT> _nullable = False <NEW_LINE> @cached_property <NEW_LINE> def openapi_types(): <NEW_LINE> <INDENT> lazy_import() <NEW_LINE> return { 'payment_id': (str,), 'status': (PaymentInitiationPaymentCreateStatus,), 'request_id': (str,), } <NEW_LINE> <DEDENT> @cached_property <NEW_LINE> def discriminator(): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> attribute_map = { 'payment_id': 'payment_id', 'status': 'status', 'request_id': 'request_id', } <NEW_LINE> _composed_schemas = {} <NEW_LINE> required_properties = set([ '_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', ]) <NEW_LINE> @convert_js_args_to_python_args <NEW_LINE> def __init__(self, payment_id, status, request_id, *args, **kwargs): <NEW_LINE> <INDENT> _check_type = kwargs.pop('_check_type', True) <NEW_LINE> _spec_property_naming = kwargs.pop('_spec_property_naming', False) <NEW_LINE> _path_to_item = kwargs.pop('_path_to_item', ()) <NEW_LINE> _configuration = kwargs.pop('_configuration', None) <NEW_LINE> _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) <NEW_LINE> if args: <NEW_LINE> <INDENT> raise ApiTypeError( "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % ( args, self.__class__.__name__, ), path_to_item=_path_to_item, valid_classes=(self.__class__,), ) <NEW_LINE> <DEDENT> self._data_store = {} <NEW_LINE> self._check_type = _check_type <NEW_LINE> self._spec_property_naming = _spec_property_naming <NEW_LINE> self._path_to_item = _path_to_item <NEW_LINE> self._configuration = _configuration <NEW_LINE> self._visited_composed_classes = _visited_composed_classes + (self.__class__,) <NEW_LINE> self.payment_id = payment_id <NEW_LINE> self.status = status <NEW_LINE> self.request_id = request_id <NEW_LINE> for var_name, var_value in kwargs.items(): <NEW_LINE> <INDENT> if var_name not in self.attribute_map and self._configuration is not None and self._configuration.discard_unknown_keys and self.additional_properties_type is None: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> setattr(self, var_name, var_value) | NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values. | 6259905d32920d7e50bc7696 |
class CopyTemplate: <NEW_LINE> <INDENT> def __init__(self, form, user): <NEW_LINE> <INDENT> if not DB_FILE or ";" in DB_FILE: <NEW_LINE> <INDENT> print_error("PROBLEM WITH DATABASE", DB_FILE) <NEW_LINE> <DEDENT> args = get_args(form) <NEW_LINE> self.run(args.get('id')) <NEW_LINE> <DEDENT> def run(self, template_id): <NEW_LINE> <INDENT> if template_id: <NEW_LINE> <INDENT> status = self.form_entry(template_id) <NEW_LINE> if not status: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> print("""<p3>%s</p3>""" % status) <NEW_LINE> <DEDENT> self.create_selection() <NEW_LINE> <DEDENT> def form_entry(self, template_id): <NEW_LINE> <INDENT> with sqlite3.connect(DB_FILE) as conn: <NEW_LINE> <INDENT> conn.row_factory = sqlite3.Row <NEW_LINE> cursor = conn.cursor() <NEW_LINE> cursor.execute( 'select * from template where id = ?', (template_id, )) <NEW_LINE> row = cursor.fetchone() <NEW_LINE> <DEDENT> if not row: <NEW_LINE> <INDENT> return "No Template for id %s" % template_id <NEW_LINE> <DEDENT> form = EntryForm() <NEW_LINE> form.create_form(row) <NEW_LINE> print(form.page) <NEW_LINE> return None <NEW_LINE> <DEDENT> def create_selection(self): <NEW_LINE> <INDENT> with sqlite3.connect(DB_FILE) as conn: <NEW_LINE> <INDENT> conn.row_factory = sqlite3.Row <NEW_LINE> cursor = conn.cursor() <NEW_LINE> rows = cursor.execute('select * from template').fetchall() <NEW_LINE> rows = [ dict(id=x['id'], description=x['description']) for x in rows ] <NEW_LINE> <DEDENT> input_ = dict( SCRIPT_NAME=SCRIPT_NAME, title="Create Course From Template", h1="Choose Template", templates=rows) <NEW_LINE> file_loader = FileSystemLoader('templates') <NEW_LINE> env = Environment(loader=file_loader) <NEW_LINE> template = env.get_template('picktemplate.html') <NEW_LINE> output = template.render(input_) <NEW_LINE> print(output) | Manage form for creating defaulted entry from template | 6259905d4e4d562566373a57 |
class FirethornCheckerResults(object): <NEW_LINE> <INDENT> def __init__(self, exceptions={}, message=""): <NEW_LINE> <INDENT> self.exceptions = exceptions <NEW_LINE> self.message = message <NEW_LINE> return | Firethorn Health Checker Results Class, stores information from a health check run | 6259905d45492302aabfdb29 |
class CreateDistanceCallback(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> locations = create_data_array() <NEW_LINE> size = len(locations) <NEW_LINE> self.matrix = {} <NEW_LINE> for from_node in xrange(size): <NEW_LINE> <INDENT> self.matrix[from_node] = {} <NEW_LINE> for to_node in xrange(size): <NEW_LINE> <INDENT> if from_node == to_node: <NEW_LINE> <INDENT> self.matrix[from_node][to_node] = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> x1 = locations[from_node][0] <NEW_LINE> y1 = locations[from_node][1] <NEW_LINE> x2 = locations[to_node][0] <NEW_LINE> y2 = locations[to_node][1] <NEW_LINE> self.matrix[from_node][to_node] = distance(x1, y1, x2, y2) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def Distance(self, from_node, to_node): <NEW_LINE> <INDENT> return int(self.matrix[from_node][to_node]) | Create callback to calculate distances between points. | 6259905d07f4c71912bb0a8c |
class _AsyncSFTPServer(SFTPServer): <NEW_LINE> <INDENT> @asyncio.coroutine <NEW_LINE> def format_longname(self, name): <NEW_LINE> <INDENT> return super().format_longname(name) <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def open(self, path, pflags, attrs): <NEW_LINE> <INDENT> return super().open(path, pflags, attrs) <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def close(self, file_obj): <NEW_LINE> <INDENT> super().close(file_obj) <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def read(self, file_obj, offset, size): <NEW_LINE> <INDENT> return super().read(file_obj, offset, size) <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def write(self, file_obj, offset, data): <NEW_LINE> <INDENT> return super().write(file_obj, offset, data) <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def lstat(self, path): <NEW_LINE> <INDENT> return super().lstat(path) <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def fstat(self, file_obj): <NEW_LINE> <INDENT> return super().fstat(file_obj) <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def setstat(self, path, attrs): <NEW_LINE> <INDENT> super().setstat(path, attrs) <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def fsetstat(self, file_obj, attrs): <NEW_LINE> <INDENT> super().fsetstat(file_obj, attrs) <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def listdir(self, path): <NEW_LINE> <INDENT> return super().listdir(path) <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def remove(self, path): <NEW_LINE> <INDENT> super().remove(path) <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def mkdir(self, path, attrs): <NEW_LINE> <INDENT> super().mkdir(path, attrs) <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def rmdir(self, path): <NEW_LINE> <INDENT> super().rmdir(path) <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def realpath(self, path): <NEW_LINE> <INDENT> return super().realpath(path) <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def stat(self, path): <NEW_LINE> <INDENT> return super().stat(path) <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def rename(self, oldpath, newpath): <NEW_LINE> <INDENT> super().rename(oldpath, newpath) <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def readlink(self, path): <NEW_LINE> <INDENT> return super().readlink(path) <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def symlink(self, oldpath, newpath): <NEW_LINE> <INDENT> super().symlink(oldpath, newpath) <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def posix_rename(self, oldpath, newpath): <NEW_LINE> <INDENT> super().posix_rename(oldpath, newpath) <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def statvfs(self, path): <NEW_LINE> <INDENT> return super().statvfs(path) <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def fstatvfs(self, file_obj): <NEW_LINE> <INDENT> return super().fstatvfs(file_obj) <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def link(self, oldpath, newpath): <NEW_LINE> <INDENT> super().link(oldpath, newpath) <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def fsync(self, file_obj): <NEW_LINE> <INDENT> super().fsync(file_obj) <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def exit(self): <NEW_LINE> <INDENT> super().exit() | Implement all SFTP callbacks as coroutines | 6259905d99cbb53fe6832530 |
class KafkaConsumer: <NEW_LINE> <INDENT> def __init__( self, topic_name_pattern, message_handler, is_avro=True, offset_earliest=False, sleep_secs=1.0, consume_timeout=0.1, ): <NEW_LINE> <INDENT> self.topic_name_pattern = topic_name_pattern <NEW_LINE> self.message_handler = message_handler <NEW_LINE> self.sleep_secs = sleep_secs <NEW_LINE> self.consume_timeout = consume_timeout <NEW_LINE> self.offset_earliest = offset_earliest <NEW_LINE> self.broker_properties = { "bootstrap.servers": "PLAINTEXT://localhost:9092", "group.id": "0" } <NEW_LINE> if self.offset_earliest: <NEW_LINE> <INDENT> self.broker_properties["auto.offset.reset"] = "earliest" <NEW_LINE> <DEDENT> if is_avro is True: <NEW_LINE> <INDENT> self.broker_properties["schema.registry.url"] = "http://localhost:8081" <NEW_LINE> self.consumer = AvroConsumer(self.broker_properties) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.consumer = Consumer(self.broker_properties) <NEW_LINE> <DEDENT> self.consumer.subscribe([self.topic_name_pattern], on_assign=self.on_assign) <NEW_LINE> <DEDENT> def on_assign(self, consumer, partitions): <NEW_LINE> <INDENT> logger.info("on_assign is incomplete - skipping") <NEW_LINE> for partition in partitions: <NEW_LINE> <INDENT> if self.offset_earliest: <NEW_LINE> <INDENT> partition.offset = OFFSET_BEGINNING <NEW_LINE> <DEDENT> <DEDENT> logger.info("partitions assigned for %s", self.topic_name_pattern) <NEW_LINE> consumer.assign(partitions) <NEW_LINE> <DEDENT> async def consume(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> num_results = 1 <NEW_LINE> while num_results > 0: <NEW_LINE> <INDENT> num_results = self._consume() <NEW_LINE> <DEDENT> await gen.sleep(self.sleep_secs) <NEW_LINE> <DEDENT> <DEDENT> def _consume(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> messages = self.consumer.consume(10, timeout=self.consume_timeout) <NEW_LINE> <DEDENT> except SerializerError: <NEW_LINE> <INDENT> print(f"Deserialization failed !!!") <NEW_LINE> return 0 <NEW_LINE> <DEDENT> if not messages: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> if messages.error(): <NEW_LINE> <INDENT> print(f"Consumer Errors {messages.error()}") <NEW_LINE> return 0 <NEW_LINE> <DEDENT> print(f"consumed {len(messages)} messages") <NEW_LINE> for message in messages: <NEW_LINE> <INDENT> print(f"consume and handle message {message.key()}: {message.value()}") <NEW_LINE> self.message_handler(message) <NEW_LINE> <DEDENT> await asyncio.sleep(0.01) <NEW_LINE> return 1 <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self.consumer.close() | Defines the base kafka consumer class | 6259905d24f1403a926863f6 |
class Provider(metaclass=ABCMeta): <NEW_LINE> <INDENT> @abstractmethod <NEW_LINE> def make_request(self, method, params=None): <NEW_LINE> <INDENT> raise NotImplementedError("Providers must implement this method") <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def is_connected(self): <NEW_LINE> <INDENT> raise NotImplementedError("Providers must implement this method") | The provider defines how the IconService connects to Loopchain. | 6259905da17c0f6771d5d6cb |
class CommandPWD(Command): <NEW_LINE> <INDENT> def set_args(self, args): <NEW_LINE> <INDENT> self.__args = args <NEW_LINE> <DEDENT> def run(self, input, env): <NEW_LINE> <INDENT> self.__output = Stream() <NEW_LINE> return_value = 0 <NEW_LINE> if self.__args: <NEW_LINE> <INDENT> self.__output.write_line('Wrong number of arguments for pwd command:' ' expected 0, got {}.'.format(len(self.__args))) <NEW_LINE> return_value = 1 <NEW_LINE> return CommandResult(self.__output, env, return_value) <NEW_LINE> <DEDENT> self.__output.write_line(env.get_cwd()) <NEW_LINE> return CommandResult(self.__output, env, return_value) | The 'pwd' command prints name of current/working directory. | 6259905d4428ac0f6e659b8e |
class TimeDeltaFormat(TimeFormat): <NEW_LINE> <INDENT> def _check_scale(self, scale): <NEW_LINE> <INDENT> if scale is not None and scale not in TIME_DELTA_SCALES: <NEW_LINE> <INDENT> raise ScaleValueError("Scale value '{0}' not in " "allowed values {1}" .format(scale, TIME_DELTA_SCALES)) <NEW_LINE> <DEDENT> return scale <NEW_LINE> <DEDENT> def set_jds(self, val1, val2): <NEW_LINE> <INDENT> self._check_scale(self._scale) <NEW_LINE> self.jd1, self.jd2 = day_frac(val1, val2, divisor=1./self.unit) <NEW_LINE> <DEDENT> @property <NEW_LINE> def value(self): <NEW_LINE> <INDENT> return (self.jd1 + self.jd2) / self.unit | Base class for time delta representations | 6259905d63d6d428bbee3db0 |
class FairsConfig(AppConfig): <NEW_LINE> <INDENT> name = "myhandycrafts.fairs" <NEW_LINE> verbose_name = _("Fairs") | config for fairs | 6259905d435de62698e9d456 |
class Queue(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.list = [] <NEW_LINE> <DEDENT> def insert(self, elem): <NEW_LINE> <INDENT> self.list.append(elem) <NEW_LINE> <DEDENT> def remove(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.list.pop(0) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> raise ValueError | Implements a Queue | 6259905d76e4537e8c3f0bdd |
class StorageElementTestCase( unittest.TestCase ): <NEW_LINE> <INDENT> def setUp( self ): <NEW_LINE> <INDENT> self.numberOfFiles = 1 <NEW_LINE> self.storageElement = StorageElement( storageElementToTest ) <NEW_LINE> self.localSourceFile = fileToTest <NEW_LINE> self.localFileSize = getSize( self.localSourceFile ) <NEW_LINE> self.destDirectory = lfnDirToTest <NEW_LINE> destinationDir = self.destDirectory <NEW_LINE> res = self.storageElement.createDirectory( destinationDir ) <NEW_LINE> self.assertTrue(res['OK']) <NEW_LINE> <DEDENT> def tearDown( self ): <NEW_LINE> <INDENT> res = self.storageElement.removeDirectory( self.destDirectory, recursive = True ) <NEW_LINE> self.assertTrue(res['OK']) | Base class for the StorageElement test cases
| 6259905dd486a94d0ba2d619 |
class UserProfileManager(BaseUserManager): <NEW_LINE> <INDENT> def create_user(self, email, name, password=None): <NEW_LINE> <INDENT> if not email: <NEW_LINE> <INDENT> raise ValueError("User must have an email address") <NEW_LINE> <DEDENT> email = self.normalize_email(email) <NEW_LINE> user = self.model(email=email, name=name) <NEW_LINE> user.set_password(password) <NEW_LINE> user.save(using=self._db) <NEW_LINE> return user <NEW_LINE> <DEDENT> def create_superuser(self, email, name, password): <NEW_LINE> <INDENT> user = self.create_user(email, name, password) <NEW_LINE> user.is_superuser = True <NEW_LINE> user.is_staff = True <NEW_LINE> user.save(using=self._db) | Manager for user profiles | 6259905d01c39578d7f1425f |
class Player: <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> <DEDENT> def get_player_ships(self, board, ships): <NEW_LINE> <INDENT> for ship in ships: <NEW_LINE> <INDENT> board.ship_on_board(ship) <NEW_LINE> board.print_board(board.board) <NEW_LINE> <DEDENT> <DEDENT> def fire(self, hit_board, show_board): <NEW_LINE> <INDENT> value = list(input ("Enter location to hit {}: ".format(self.name).lower())) <NEW_LINE> result = self.board.validate_play(value, hit_board, show_board) <NEW_LINE> if result[0] == 'hit': <NEW_LINE> <INDENT> print('You HIT') <NEW_LINE> return result[1], result[2] <NEW_LINE> <DEDENT> elif result[0] == 'miss': <NEW_LINE> <INDENT> print('You missed') <NEW_LINE> return result[1], result[2] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print(result[0]) <NEW_LINE> self.fire(hit_board, show_board) | Sets player name, ships available to the player and their orientation | 6259905d498bea3a75a59126 |
class UserCreateTest(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.username = "testuser" <NEW_LINE> self.password = "password" <NEW_LINE> self.email = "[email protected]" <NEW_LINE> self.first_name = "John" <NEW_LINE> self.last_name = "Sample" <NEW_LINE> self.affiliation = "University" <NEW_LINE> self.public_group_name = ExtendedGroup.objects.public_group().name <NEW_LINE> <DEDENT> def test_add_new_user_to_public_group(self): <NEW_LINE> <INDENT> new_user = User.objects.create_user(self.username) <NEW_LINE> self.assertEqual( new_user.groups.filter(name=self.public_group_name).count(), 1) <NEW_LINE> <DEDENT> def test_init_user(self): <NEW_LINE> <INDENT> init_user(self.username, self.password, self.email, self.first_name, self.last_name, self.affiliation) <NEW_LINE> new_user = User.objects.get(username=self.username) <NEW_LINE> self.assertEqual( new_user.groups.filter(name=self.public_group_name).count(), 1) | Test User instance creation | 6259905d23e79379d538db4d |
class Graph(Base): <NEW_LINE> <INDENT> __tablename__ = 'graph' <NEW_LINE> id = Column('graph_id', Integer, primary_key=True) <NEW_LINE> calculation_id = Column(Integer, ForeignKey('calculation.calculation_id'), nullable=False) <NEW_LINE> title = Column(String(255), nullable=False) <NEW_LINE> created = Column(DateTime, default=datetime.datetime.now) <NEW_LINE> updated = Column(DateTime, onupdate=datetime.datetime.now) <NEW_LINE> finished = Column(Boolean, default=False) <NEW_LINE> params = Column(Text, nullable=False) <NEW_LINE> points_count = Column(Integer) <NEW_LINE> data = relationship('Data', order_by='Data.point_x, Data.created', cascade='delete', lazy='select') <NEW_LINE> def __init__(self, calculation_id, title, params): <NEW_LINE> <INDENT> Base.__init__(self) <NEW_LINE> self.calculation_id = calculation_id <NEW_LINE> self.title = title <NEW_LINE> self.params = params <NEW_LINE> self.points_count = 0 <NEW_LINE> self.finished = False <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def list(session, calculation_id): <NEW_LINE> <INDENT> r = None <NEW_LINE> if calculation_id: <NEW_LINE> <INDENT> q = session.query(Graph) .filter(Graph.calculation_id == calculation_id) .order_by(Graph.created.desc()) <NEW_LINE> r = q.all() <NEW_LINE> <DEDENT> return r <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get(session, id): <NEW_LINE> <INDENT> r = None <NEW_LINE> if id: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> r = session.query(Graph).get(id) <NEW_LINE> <DEDENT> except sqlalchemy.exc.InvalidRequestError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> return r <NEW_LINE> <DEDENT> def last_image(self, session): <NEW_LINE> <INDENT> r = None <NEW_LINE> if self.id: <NEW_LINE> <INDENT> r = session.query(Data) .filter(Data.graph_id == self.id) .order_by(Data.created.desc()) .first() <NEW_LINE> <DEDENT> return r | График | 6259905de64d504609df9ef7 |
class MXBase(dns.rdata.Rdata): <NEW_LINE> <INDENT> __slots__ = ['preference', 'exchange'] <NEW_LINE> def __init__(self, rdclass, rdtype, preference, exchange): <NEW_LINE> <INDENT> super(MXBase, self).__init__(rdclass, rdtype) <NEW_LINE> self.preference = preference <NEW_LINE> self.exchange = exchange <NEW_LINE> <DEDENT> def to_text(self, origin=None, relativize=True, **kw): <NEW_LINE> <INDENT> exchange = self.exchange.choose_relativity(origin, relativize) <NEW_LINE> return '%d %s' % (self.preference, exchange) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True): <NEW_LINE> <INDENT> preference = tok.get_uint16() <NEW_LINE> exchange = tok.get_name() <NEW_LINE> exchange = exchange.choose_relativity(origin, relativize) <NEW_LINE> tok.get_eol() <NEW_LINE> return cls(rdclass, rdtype, preference, exchange) <NEW_LINE> <DEDENT> def to_wire(self, file, compress=None, origin=None): <NEW_LINE> <INDENT> pref = struct.pack("!H", self.preference) <NEW_LINE> file.write(pref) <NEW_LINE> self.exchange.to_wire(file, compress, origin) <NEW_LINE> <DEDENT> def to_digestable(self, origin=None): <NEW_LINE> <INDENT> return struct.pack("!H", self.preference) + self.exchange.to_digestable(origin) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None): <NEW_LINE> <INDENT> (preference, ) = struct.unpack('!H', wire[current: current + 2]) <NEW_LINE> current += 2 <NEW_LINE> rdlen -= 2 <NEW_LINE> (exchange, cused) = dns.name.from_wire(wire[: current + rdlen], current) <NEW_LINE> if cused != rdlen: <NEW_LINE> <INDENT> raise dns.exception.FormError <NEW_LINE> <DEDENT> if origin is not None: <NEW_LINE> <INDENT> exchange = exchange.relativize(origin) <NEW_LINE> <DEDENT> return cls(rdclass, rdtype, preference, exchange) <NEW_LINE> <DEDENT> def choose_relativity(self, origin=None, relativize=True): <NEW_LINE> <INDENT> self.exchange = self.exchange.choose_relativity(origin, relativize) | Base class for rdata that is like an MX record.
@ivar preference: the preference value
@type preference: int
@ivar exchange: the exchange name
@type exchange: dns.name.Name object | 6259905d460517430c432b7b |
class MoveForms(messages.Message): <NEW_LINE> <INDENT> items = messages.MessageField(MoveForm, 1, repeated=True) | MoveForms for showing all the moves in a game | 6259905d009cb60464d02b87 |
class BracketPluginRunCommand(sublime_plugin.TextCommand): <NEW_LINE> <INDENT> def run(self, edit): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> Payload.args["edit"] = edit <NEW_LINE> Payload.plugin.run(**Payload.args) <NEW_LINE> Payload.status = True <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> print("BracketHighlighter: Plugin Run Error:\n%s" % str(traceback.format_exc())) | Sublime run command to run BH plugins. | 6259905d56b00c62f0fb3f1d |
class TinyMCEMemo3x3AndroidMainWidget(TinyMCEMemo2x2AndroidMainWidget): <NEW_LINE> <INDENT> plugin_uid = 'tinymce_memo_3x3' <NEW_LINE> cols = 3 <NEW_LINE> rows = 3 | Memo3x3 plugin widget for Android layout (placeholder `main`). | 6259905e1f5feb6acb16423c |
class Map(device.Map): <NEW_LINE> <INDENT> def _to_device(self): <NEW_LINE> <INDENT> if not hasattr(self, '_device_values'): <NEW_LINE> <INDENT> self._device_values = array.to_device(_queue, self._values) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> warnings.warn("Copying Map data for %s again, do you really want to do this?" % self) <NEW_LINE> self._device_values.set(self._values, _queue) | OP2 OpenCL map, a relation between two Sets. | 6259905e3539df3088ecd8ee |
class StackedLSTM(nn.Module): <NEW_LINE> <INDENT> def __init__(self, num_layers, input_size, rnn_size, dropout): <NEW_LINE> <INDENT> super(StackedLSTM, self).__init__() <NEW_LINE> self.dropout = nn.Dropout(dropout) <NEW_LINE> self.num_layers = num_layers <NEW_LINE> self.layers = nn.ModuleList() <NEW_LINE> for _ in range(num_layers): <NEW_LINE> <INDENT> self.layers.append(nn.LSTMCell(input_size, rnn_size)) <NEW_LINE> input_size = rnn_size <NEW_LINE> <DEDENT> <DEDENT> def forward(self, input_feed, hidden): <NEW_LINE> <INDENT> h_0, c_0 = hidden <NEW_LINE> h_1, c_1 = [], [] <NEW_LINE> for i, layer in enumerate(self.layers): <NEW_LINE> <INDENT> h_1_i, c_1_i = layer(input_feed, (h_0[i], c_0[i])) <NEW_LINE> input_feed = h_1_i <NEW_LINE> if i + 1 != self.num_layers: <NEW_LINE> <INDENT> input_feed = self.dropout(input_feed) <NEW_LINE> <DEDENT> h_1 += [h_1_i] <NEW_LINE> c_1 += [c_1_i] <NEW_LINE> <DEDENT> h_1 = torch.stack(h_1) <NEW_LINE> c_1 = torch.stack(c_1) <NEW_LINE> return input_feed, (h_1, c_1) | Our own implementation of stacked LSTM.
Needed for the decoder, because we do input feeding. | 6259905e07f4c71912bb0a8f |
class IITech3Exception(Exception): <NEW_LINE> <INDENT> pass | The base class for exception thrown by this program. | 6259905e6e29344779b01ca1 |
class UserVideo(db.Model): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def get_key_name(video_or_youtube_id, user_data): <NEW_LINE> <INDENT> id = video_or_youtube_id <NEW_LINE> if type(id) not in [str, unicode]: <NEW_LINE> <INDENT> id = video_or_youtube_id.youtube_id <NEW_LINE> <DEDENT> return user_data.key_email + ":" + id <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_for_video_and_user_data(video, user_data, insert_if_missing=False): <NEW_LINE> <INDENT> if not user_data: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> key = UserVideo.get_key_name(video, user_data) <NEW_LINE> if insert_if_missing: <NEW_LINE> <INDENT> return UserVideo.get_or_insert( key_name=key, user=user_data.user, video=video, duration=video.duration) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return UserVideo.get_by_key_name(key) <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def count_completed_for_user_data(user_data): <NEW_LINE> <INDENT> return UserVideo.get_completed_user_videos(user_data).count(limit=10000) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_completed_user_videos(user_data): <NEW_LINE> <INDENT> query = UserVideo.all() <NEW_LINE> query.filter("user =", user_data.user) <NEW_LINE> query.filter("completed =", True) <NEW_LINE> return query <NEW_LINE> <DEDENT> user = db.UserProperty() <NEW_LINE> video = db.ReferenceProperty(Video) <NEW_LINE> last_second_watched = db.IntegerProperty(default=0, indexed=False) <NEW_LINE> seconds_watched = db.IntegerProperty(default=0) <NEW_LINE> last_watched = db.DateTimeProperty(auto_now_add=True) <NEW_LINE> duration = db.IntegerProperty(default=0, indexed=False) <NEW_LINE> completed = db.BooleanProperty(default=False) <NEW_LINE> @property <NEW_LINE> def points(self): <NEW_LINE> <INDENT> return points.VideoPointCalculator(self) <NEW_LINE> <DEDENT> @property <NEW_LINE> def progress(self): <NEW_LINE> <INDENT> if self.completed: <NEW_LINE> <INDENT> return 1.0 <NEW_LINE> <DEDENT> elif self.duration <= 0: <NEW_LINE> <INDENT> logging.info("UserVideo.duration has invalid value %r, key: %s" % (self.duration, str(self.key()))) <NEW_LINE> return 0.0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return min(1.0, float(self.seconds_watched) / self.duration) <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def from_json(cls, json, user_data): <NEW_LINE> <INDENT> readable_id = json['video']['readable_id'] <NEW_LINE> video = Video.get_for_readable_id(readable_id) <NEW_LINE> return cls( key_name=UserVideo.get_key_name(video, user_data), user=user_data.user, video=video, last_watched=util.parse_iso8601(json['last_watched']), last_second_watched=int(json['last_second_watched']), seconds_watched=int(json['seconds_watched']), duration=int(json['duration']), completed=bool(json['completed']) ) | A single user's interaction with a single video. | 6259905ed7e4931a7ef3d6ad |
class LayerNormLSTMCell(tf.contrib.rnn.RNNCell): <NEW_LINE> <INDENT> def __init__(self, num_units, forget_bias=1.0, use_recurrent_dropout=False, dropout_keep_prob=0.90): <NEW_LINE> <INDENT> self.num_units = num_units <NEW_LINE> self.forget_bias = forget_bias <NEW_LINE> self.use_recurrent_dropout = use_recurrent_dropout <NEW_LINE> self.dropout_keep_prob = dropout_keep_prob <NEW_LINE> <DEDENT> @property <NEW_LINE> def input_size(self): <NEW_LINE> <INDENT> return self.num_units <NEW_LINE> <DEDENT> @property <NEW_LINE> def output_size(self): <NEW_LINE> <INDENT> return self.num_units <NEW_LINE> <DEDENT> @property <NEW_LINE> def state_size(self): <NEW_LINE> <INDENT> return 2 * self.num_units <NEW_LINE> <DEDENT> def get_output(self, state): <NEW_LINE> <INDENT> h, unused_c = tf.split(state, 2, 1) <NEW_LINE> return h <NEW_LINE> <DEDENT> def __call__(self, x, state, timestep=0, scope=None): <NEW_LINE> <INDENT> with tf.variable_scope(scope or type(self).__name__): <NEW_LINE> <INDENT> h, c = tf.split(state, 2, 1) <NEW_LINE> h_size = self.num_units <NEW_LINE> x_size = x.get_shape().as_list()[1] <NEW_LINE> batch_size = x.get_shape().as_list()[0] <NEW_LINE> w_init = None <NEW_LINE> h_init = lstm_ortho_initializer(1.0) <NEW_LINE> w_xh = tf.get_variable( 'W_xh', [x_size, 4 * self.num_units], initializer=w_init) <NEW_LINE> w_hh = tf.get_variable( 'W_hh', [self.num_units, 4 * self.num_units], initializer=h_init) <NEW_LINE> concat = tf.concat([x, h], 1) <NEW_LINE> w_full = tf.concat([w_xh, w_hh], 0) <NEW_LINE> concat = tf.matmul(concat, w_full) <NEW_LINE> concat = layer_norm_all(concat, batch_size, 4, h_size, 'ln_all') <NEW_LINE> i, j, f, o = tf.split(concat, 4, 1) <NEW_LINE> if self.use_recurrent_dropout: <NEW_LINE> <INDENT> g = tf.nn.dropout(tf.tanh(j), self.dropout_keep_prob) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> g = tf.tanh(j) <NEW_LINE> <DEDENT> new_c = c * tf.sigmoid(f + self.forget_bias) + tf.sigmoid(i) * g <NEW_LINE> new_h = tf.tanh(layer_norm(new_c, h_size, 'ln_c')) * tf.sigmoid(o) <NEW_LINE> <DEDENT> return new_h, tf.concat([new_h, new_c], 1) | Layer-Norm, with Ortho Init. and Recurrent Dropout without Memory Loss.
https://arxiv.org/abs/1607.06450 - Layer Norm
https://arxiv.org/abs/1603.05118 - Recurrent Dropout without Memory Loss | 6259905e4428ac0f6e659b90 |
class IntegralType(NumericType, metaclass=DataTypeSingleton): <NEW_LINE> <INDENT> pass | Integral data types. | 6259905ef7d966606f7493e2 |
class bedParse(object): <NEW_LINE> <INDENT> def __init__(self,chrom,start,end,name=None,score=None,strand=None,thickStart=None,thickEnd=None,itemRgb=None,blockCount=None,blockSize=None,blockStarts=None): <NEW_LINE> <INDENT> self.chrom = chrom <NEW_LINE> self.start = start <NEW_LINE> self.end = end <NEW_LINE> self.name = name <NEW_LINE> self.score = score <NEW_LINE> self.strand = strand <NEW_LINE> self.thickStart = thickStart <NEW_LINE> self.thickEnd = thickEnd <NEW_LINE> self.itemRgb = itemRgb <NEW_LINE> self.blockCount = blockCount <NEW_LINE> self.blockSize = blockSize <NEW_LINE> self.blockStarts = blockStarts <NEW_LINE> <DEDENT> def length(self): <NEW_LINE> <INDENT> return self.end - self.start | a class for one line of a bed file | 6259905e21a7993f00c675c0 |
class FileListsXml(XmlStreamedParser, PackageXmlMixIn): <NEW_LINE> <INDENT> def _registerTypes(self): <NEW_LINE> <INDENT> PackageXmlMixIn._registerTypes(self) <NEW_LINE> self._databinder.registerType(_PackageFL, name='package') <NEW_LINE> self._databinder.registerType(_FileLists, name='filelists') | Handle registering all types for parsing filelists.xml files. | 6259905e2c8b7c6e89bd4e42 |
class CmdSaveOnTest(APITestCase): <NEW_LINE> <INDENT> def test_basic(self): <NEW_LINE> <INDENT> result = self.api.cmd_save_on() <NEW_LINE> self._test_for_success(result) | /cmd/save-on | 6259905e01c39578d7f14260 |
class PublicKeyFromLaunchpadChecker(SSHPublicKeyDatabase): <NEW_LINE> <INDENT> credentialInterfaces = ISSHPrivateKeyWithMind, <NEW_LINE> implements(ICredentialsChecker) <NEW_LINE> def __init__(self, authserver): <NEW_LINE> <INDENT> self.authserver = authserver <NEW_LINE> <DEDENT> def checkKey(self, credentials): <NEW_LINE> <INDENT> d = credentials.mind.lookupUserDetails( self.authserver, credentials.username) <NEW_LINE> d.addCallback(self._checkForAuthorizedKey, credentials) <NEW_LINE> d.addErrback(self._reportNoSuchUser, credentials) <NEW_LINE> return d <NEW_LINE> <DEDENT> def _reportNoSuchUser(self, failure, credentials): <NEW_LINE> <INDENT> trap_fault(failure, faults.NoSuchPersonWithName) <NEW_LINE> raise UserDisplayedUnauthorizedLogin( "No such Launchpad account: %s" % credentials.username) <NEW_LINE> <DEDENT> def _checkForAuthorizedKey(self, user_dict, credentials): <NEW_LINE> <INDENT> if credentials.algName == 'ssh-dss': <NEW_LINE> <INDENT> wantKeyType = 'DSA' <NEW_LINE> <DEDENT> elif credentials.algName == 'ssh-rsa': <NEW_LINE> <INDENT> wantKeyType = 'RSA' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if len(user_dict['keys']) == 0: <NEW_LINE> <INDENT> raise UserDisplayedUnauthorizedLogin( "Launchpad user %r doesn't have a registered SSH key" % credentials.username) <NEW_LINE> <DEDENT> for keytype, keytext in user_dict['keys']: <NEW_LINE> <INDENT> if keytype != wantKeyType: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> if keytext.decode('base64') == credentials.blob: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> except binascii.Error: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> <DEDENT> raise UnauthorizedLogin( "Your SSH key does not match any key registered for Launchpad " "user %s" % credentials.username) | Cred checker for getting public keys from launchpad.
It knows how to get the public keys from the authserver. | 6259905e627d3e7fe0e084de |
class Worker(object): <NEW_LINE> <INDENT> def __init__(self, on_start, on_end, on_result, on_log): <NEW_LINE> <INDENT> self.pid = None <NEW_LINE> self.job = None <NEW_LINE> self.prev_call = None <NEW_LINE> self.callback_on_log = on_log <NEW_LINE> self.callback_on_end = on_end <NEW_LINE> self.callback_on_start = on_start <NEW_LINE> self.callback_on_result = on_result <NEW_LINE> self.protocol = WorkerProtocol(self) <NEW_LINE> def log_line(line): <NEW_LINE> <INDENT> if self.callback_on_log: <NEW_LINE> <INDENT> self.callback_on_log(self, self.job, line) <NEW_LINE> <DEDENT> <DEDENT> self._log_handler = Liner(log_line) <NEW_LINE> <DEDENT> def work_on(self, job): <NEW_LINE> <INDENT> self.push_job(job) <NEW_LINE> if job.call != self.prev_call: <NEW_LINE> <INDENT> self.send_pickled(job.call) <NEW_LINE> self.prev_call = job.call <NEW_LINE> <DEDENT> self.send_pickled((job.args, job.kwargs)) <NEW_LINE> <DEDENT> def on_start(self, pid): <NEW_LINE> <INDENT> self.pid = pid <NEW_LINE> if self.callback_on_start: <NEW_LINE> <INDENT> self.callback_on_start(self) <NEW_LINE> <DEDENT> <DEDENT> def on_end(self, reason): <NEW_LINE> <INDENT> self._log_handler.finish() <NEW_LINE> if self.callback_on_end: <NEW_LINE> <INDENT> v = reason.value <NEW_LINE> self.callback_on_end(self, self.pop_job(), v.exitCode, v.signal) <NEW_LINE> <DEDENT> <DEDENT> def on_result(self, result): <NEW_LINE> <INDENT> self._log_handler.finish() <NEW_LINE> if self.callback_on_result: <NEW_LINE> <INDENT> self.callback_on_result(self, self.pop_job(), result) <NEW_LINE> <DEDENT> <DEDENT> def on_log(self, data): <NEW_LINE> <INDENT> self._log_handler.add(data) <NEW_LINE> <DEDENT> def retire(self): <NEW_LINE> <INDENT> self.send_pickled(None) <NEW_LINE> <DEDENT> def send_signal(self, signal): <NEW_LINE> <INDENT> transport = self.protocol.transport <NEW_LINE> if transport: <NEW_LINE> <INDENT> transport.signalProcess(signal) <NEW_LINE> <DEDENT> <DEDENT> def send_pickled(self, data): <NEW_LINE> <INDENT> transport = self.protocol.transport <NEW_LINE> if transport: <NEW_LINE> <INDENT> transport.write(pickle.dumps(data)) <NEW_LINE> <DEDENT> <DEDENT> def push_job(self, job): <NEW_LINE> <INDENT> self.job, job.worker = job, self <NEW_LINE> <DEDENT> def pop_job(self): <NEW_LINE> <INDENT> job, self.job = self.job, None <NEW_LINE> if job: <NEW_LINE> <INDENT> job.worker = None <NEW_LINE> <DEDENT> return job | A Worker represents a process that persists to work on jobs. | 6259905ed6c5a102081e3776 |
class WorkingDir(object): <NEW_LINE> <INDENT> def __init__(self, *base_dirs): <NEW_LINE> <INDENT> base_dir = os.path.join(*base_dirs) <NEW_LINE> if not os.path.isdir(base_dir): <NEW_LINE> <INDENT> os.makedirs(base_dir) <NEW_LINE> <DEDENT> os.chdir(base_dir) <NEW_LINE> self.base_dir = os.getcwd() <NEW_LINE> self.path = '' <NEW_LINE> <DEDENT> def change(self, *relative): <NEW_LINE> <INDENT> self.path = os.path.join(self.base_dir, *relative) <NEW_LINE> if not os.path.isdir(self.path): <NEW_LINE> <INDENT> os.makedirs(self.path) <NEW_LINE> <DEDENT> <DEDENT> def file(self, file_name): <NEW_LINE> <INDENT> return os.path.join(self.path, file_name) <NEW_LINE> <DEDENT> def exist(self, file_name): <NEW_LINE> <INDENT> return os.path.exists(os.path.join(self.path, file_name)) | Working directory class.
Used for switching the download directory, creating directories, and so on.
Attributes:
base_dir: the root of the working directory; everything is always based on this directory;
path: the path relative to the root directory. | 6259905e9c8ee82313040cb4
class CryptPasswordHasher(BasePasswordHasher): <NEW_LINE> <INDENT> algorithm = "crypt" <NEW_LINE> library = "crypt" <NEW_LINE> def salt(self): <NEW_LINE> <INDENT> return get_random_string(2) <NEW_LINE> <DEDENT> def encode(self, password, salt): <NEW_LINE> <INDENT> crypt = self._load_library() <NEW_LINE> assert len(salt) == 2 <NEW_LINE> data = crypt.crypt(password, salt) <NEW_LINE> return "%s$%s$%s" % (self.algorithm, '', data) <NEW_LINE> <DEDENT> def verify(self, password, encoded): <NEW_LINE> <INDENT> crypt = self._load_library() <NEW_LINE> algorithm, salt, data = encoded.split('$', 2) <NEW_LINE> assert algorithm == self.algorithm <NEW_LINE> return constant_time_compare(data, crypt.crypt(password, data)) <NEW_LINE> <DEDENT> def safe_summary(self, encoded): <NEW_LINE> <INDENT> algorithm, salt, data = encoded.split('$', 2) <NEW_LINE> assert algorithm == self.algorithm <NEW_LINE> return SortedDict([ (_('algorithm'), algorithm), (_('salt'), salt), (_('hash'), mask_hash(data, show=3)), ]) | Password hashing using UNIX crypt (not recommended)
The crypt module is not supported on all platforms. | 6259905e32920d7e50bc769a |
class BackupProtectedItemsOperations(object): <NEW_LINE> <INDENT> def __init__(self, client, config, serializer, deserializer): <NEW_LINE> <INDENT> self._client = client <NEW_LINE> self._serialize = serializer <NEW_LINE> self._deserialize = deserializer <NEW_LINE> self.api_version = "2016-12-01" <NEW_LINE> self.config = config <NEW_LINE> <DEDENT> def list( self, vault_name, resource_group_name, filter=None, skip_token=None, custom_headers=None, raw=False, **operation_config): <NEW_LINE> <INDENT> def internal_paging(next_link=None, raw=False): <NEW_LINE> <INDENT> if not next_link: <NEW_LINE> <INDENT> url = '/Subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{vaultName}/backupProtectedItems' <NEW_LINE> path_format_arguments = { 'vaultName': self._serialize.url("vault_name", vault_name, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } <NEW_LINE> url = self._client.format_url(url, **path_format_arguments) <NEW_LINE> query_parameters = {} <NEW_LINE> query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') <NEW_LINE> if filter is not None: <NEW_LINE> <INDENT> query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') <NEW_LINE> <DEDENT> if skip_token is not None: <NEW_LINE> <INDENT> query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str') <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> url = next_link <NEW_LINE> query_parameters = {} <NEW_LINE> <DEDENT> header_parameters = {} <NEW_LINE> header_parameters['Content-Type'] = 'application/json; charset=utf-8' <NEW_LINE> if self.config.generate_client_request_id: <NEW_LINE> <INDENT> header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) <NEW_LINE> <DEDENT> if custom_headers: <NEW_LINE> <INDENT> header_parameters.update(custom_headers) <NEW_LINE> <DEDENT> if self.config.accept_language is not None: <NEW_LINE> <INDENT> header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') <NEW_LINE> <DEDENT> request = self._client.get(url, query_parameters) <NEW_LINE> response = self._client.send( request, header_parameters, **operation_config) <NEW_LINE> if response.status_code not in [200]: <NEW_LINE> <INDENT> exp = CloudError(response) <NEW_LINE> exp.request_id = response.headers.get('x-ms-request-id') <NEW_LINE> raise exp <NEW_LINE> <DEDENT> return response <NEW_LINE> <DEDENT> deserialized = models.ProtectedItemResourcePaged(internal_paging, self._deserialize.dependencies) <NEW_LINE> if raw: <NEW_LINE> <INDENT> header_dict = {} <NEW_LINE> client_raw_response = models.ProtectedItemResourcePaged(internal_paging, self._deserialize.dependencies, header_dict) <NEW_LINE> return client_raw_response <NEW_LINE> <DEDENT> return deserialized | BackupProtectedItemsOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
:ivar api_version: Client Api Version. Constant value: "2016-12-01". | 6259905e379a373c97d9a678 |
class InstructionNodeCreator(object): <NEW_LINE> <INDENT> def __init__(self, collection=None, position_tracker=None): <NEW_LINE> <INDENT> if not collection: <NEW_LINE> <INDENT> self._collection = [] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._collection = collection <NEW_LINE> <DEDENT> self._position_tracer = position_tracker <NEW_LINE> <DEDENT> def is_empty(self): <NEW_LINE> <INDENT> return not any(element.text for element in self._collection) <NEW_LINE> <DEDENT> def add_chars(self, *chars): <NEW_LINE> <INDENT> if not chars: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> current_position = self._position_tracer.get_current_position() <NEW_LINE> if (self._collection and self._collection[-1].is_text_node() and not self._position_tracer.is_repositioning_required()): <NEW_LINE> <INDENT> node = self._collection[-1] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> node = _InstructionNode(position=current_position) <NEW_LINE> self._collection.append(node) <NEW_LINE> <DEDENT> if self._position_tracer.is_linebreak_required(): <NEW_LINE> <INDENT> self._collection.append(_InstructionNode.create_break( position=current_position)) <NEW_LINE> node = _InstructionNode.create_text(current_position) <NEW_LINE> self._collection.append(node) <NEW_LINE> self._position_tracer.acknowledge_linebreak_consumed() <NEW_LINE> <DEDENT> elif self._position_tracer.is_repositioning_required(): <NEW_LINE> <INDENT> self._collection.append( _InstructionNode.create_repositioning_command( current_position ) ) <NEW_LINE> node = _InstructionNode.create_text(current_position) <NEW_LINE> self._collection.append(node) <NEW_LINE> self._position_tracer.acknowledge_position_changed() <NEW_LINE> <DEDENT> node.add_chars(*chars) <NEW_LINE> <DEDENT> def interpret_command(self, command): <NEW_LINE> <INDENT> self._update_positioning(command) <NEW_LINE> text = COMMANDS.get(command, u'') <NEW_LINE> if u'italic' in text: <NEW_LINE> <INDENT> if u'end' not in text: <NEW_LINE> <INDENT> self._collection.append( _InstructionNode.create_italics_style( self._position_tracer.get_current_position()) ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._collection.append( _InstructionNode.create_italics_style( self._position_tracer.get_current_position(), turn_on=False ) ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _update_positioning(self, command): <NEW_LINE> <INDENT> if len(command) != 4: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> first, second = command[:2], command[2:] <NEW_LINE> try: <NEW_LINE> <INDENT> positioning = PAC_BYTES_TO_POSITIONING_MAP[first][second] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._position_tracer.update_positioning(positioning) <NEW_LINE> <DEDENT> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(_format_italics(self._collection)) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_list(cls, stash_list, position_tracker): <NEW_LINE> <INDENT> instance = cls(position_tracker=position_tracker) <NEW_LINE> new_collection = instance._collection <NEW_LINE> for idx, stash in enumerate(stash_list): <NEW_LINE> <INDENT> new_collection.extend(stash._collection) <NEW_LINE> if idx < len(stash_list) - 1: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> instance._collection[-1].add_chars(u' ') <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return instance | Creates _InstructionNode instances from characters and commands, storing
them internally | 6259905ebaa26c4b54d508fa |
class GeneticFunctions(object): <NEW_LINE> <INDENT> def probability_crossover(self): <NEW_LINE> <INDENT> return 0.75 <NEW_LINE> <DEDENT> def probability_mutation(self): <NEW_LINE> <INDENT> return 0.02 <NEW_LINE> <DEDENT> def initial(self): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> def fitness(self, chromosome): <NEW_LINE> <INDENT> return fitness <NEW_LINE> <DEDENT> def check_stop(self, fits_populations): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def parents(self, fits_populations): <NEW_LINE> <INDENT> gen = list(sorted(fits_populations)) <NEW_LINE> while True: <NEW_LINE> <INDENT> f1, ch1 = next(gen) <NEW_LINE> f2, ch2 = next(gen) <NEW_LINE> yield(ch1, ch2) <NEW_LINE> pass <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> def crossover(self, parents): <NEW_LINE> <INDENT> return parents <NEW_LINE> <DEDENT> def mutation(self, chromosome): <NEW_LINE> <INDENT> return chromosome | docstring for GeneticFunctions | 6259905e3539df3088ecd8f0 |
class DeferPlugin(object): <NEW_LINE> <INDENT> def pytest_xdist_setupnodes(self, config, specs): <NEW_LINE> <INDENT> print( "\n\nUsing AnyBodyCon: ", config.getoption("--anybodycon") or get_anybodycon_path(), "\n", ) | Simple plugin to defer pytest-xdist hook functions. | 6259905e7047854f46340a15 |
class EditAppointment(View): <NEW_LINE> <INDENT> model = Appointment <NEW_LINE> form_class = AppointmentForm <NEW_LINE> template_name = 'HNApp/edit_appointment.html' <NEW_LINE> def get(self, request, pk): <NEW_LINE> <INDENT> app = Appointment.objects.get(pk=pk) <NEW_LINE> form = self.form_class(None, initial={'datetime': app.datetime, 'patient': app.patient, 'doctor': app.doctor}) <NEW_LINE> app.delete() <NEW_LINE> return render(request, self.template_name, {'form': form}) <NEW_LINE> <DEDENT> def post(self, request, pk): <NEW_LINE> <INDENT> form = self.form_class(request.POST) <NEW_LINE> if form.is_valid(): <NEW_LINE> <INDENT> appointment = form.save(commit=False) <NEW_LINE> datetime = form.cleaned_data['datetime'] <NEW_LINE> patient = form.cleaned_data['patient'] <NEW_LINE> doctor = form.cleaned_data['doctor'] <NEW_LINE> all_appointments = Appointment.objects.all() <NEW_LINE> for app in all_appointments: <NEW_LINE> <INDENT> if app.doctor == doctor: <NEW_LINE> <INDENT> if app.datetime == datetime: <NEW_LINE> <INDENT> return HttpResponseRedirect('time_taken') <NEW_LINE> <DEDENT> <DEDENT> if app.patient == patient: <NEW_LINE> <INDENT> if app.datetime == datetime: <NEW_LINE> <INDENT> return HttpResponseRedirect('time_taken') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> appointment.datetime = datetime <NEW_LINE> appointment.patient = patient <NEW_LINE> appointment.doctor = doctor <NEW_LINE> appointment.save() <NEW_LINE> if appointment is not None: <NEW_LINE> <INDENT> f = open('sys.txt', 'a') <NEW_LINE> sys.stdout = f <NEW_LINE> dt = datetime.strftime('%a, %d %b %Y %H:%M:%S %Z(%z)') <NEW_LINE> tm = time.strftime('%a, %d %b %Y %H:%M:%S %Z(%z)') <NEW_LINE> str = patient.user.username + "made appointment with " + doctor.last_name + " at " + dt + ": " + tm <NEW_LINE> print(str) <NEW_LINE> return redirect('HNApp:appointment_list') <NEW_LINE> <DEDENT> <DEDENT> return render(request, self.template_name, {'form': form}) | TODO | 6259905e3c8af77a43b68a6b |
class Metric: <NEW_LINE> <INDENT> def __init__(self, name, documentation): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.documentation = documentation <NEW_LINE> <DEDENT> def dump_header(self, out): <NEW_LINE> <INDENT> out('/// %s' % self.documentation) <NEW_LINE> out('RawMetric %s;' % self.name) <NEW_LINE> <DEDENT> def initializer(self): <NEW_LINE> <INDENT> return '%s(0)' % (self.name) <NEW_LINE> <DEDENT> def instance_name(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def dump_metric_info_code(self, out, path, counter): <NEW_LINE> <INDENT> out(' case %s:' % (counter.value())) <NEW_LINE> out(' return {"%s",' % path) <NEW_LINE> out(' &%s};' % path) <NEW_LINE> counter.next() | A single performance metric.
| 6259905e7d847024c075da27 |
class PyDmTree(PythonPackage): <NEW_LINE> <INDENT> homepage = "https://github.com/deepmind/tree" <NEW_LINE> pypi = "dm-tree/dm-tree-0.1.5.tar.gz" <NEW_LINE> maintainers = ['aweits'] <NEW_LINE> version('0.1.5', sha256='a951d2239111dfcc468071bc8ff792c7b1e3192cab5a3c94d33a8b2bda3127fa') <NEW_LINE> depends_on('py-setuptools', type='build') <NEW_LINE> depends_on('bazel', type='build') <NEW_LINE> depends_on('[email protected]:', type=('build', 'run')) <NEW_LINE> @run_after('install') <NEW_LINE> def clean(self): <NEW_LINE> <INDENT> remove_linked_tree(self.tmp_path) <NEW_LINE> <DEDENT> def patch(self): <NEW_LINE> <INDENT> self.tmp_path = tempfile.mkdtemp(dir='/tmp', prefix='spack') <NEW_LINE> env['TEST_TMPDIR'] = self.tmp_path <NEW_LINE> env['HOME'] = self.tmp_path <NEW_LINE> args = [ "'--nohome_rc',\n", "'--nosystem_rc',\n", "'--output_user_root={0}',\n".format(self.tmp_path), "'build',\n", "'--color=no',\n", "'--jobs={0}',\n".format(make_jobs), "'--verbose_failures',\n", "'--subcommands=pretty_print',\n", "'--spawn_strategy=local',\n", "'--explain=explainlogfile.txt',\n", "'--verbose_explanations',\n", "'--action_env', 'PYTHONPATH={0}',\n".format(env['PYTHONPATH']), ] <NEW_LINE> filter_file("'build',", ' '.join(args), 'setup.py') | tree is a library for working with nested data structures. In a
way, tree generalizes the builtin map() function which only
supports flat sequences, and allows to apply a function to each
leaf preserving the overall structure. | 6259905e0a50d4780f7068e9 |
class ICodeDiscountableMarker(Interface): <NEW_LINE> <INDENT> pass | Discount Interface | 6259905ed486a94d0ba2d61d |
class TwoLayerNet(object): <NEW_LINE> <INDENT> def __init__(self, input_dim=3*32*32, hidden_dim=100, num_classes=10, weight_scale=1e-3, reg=0.0): <NEW_LINE> <INDENT> self.params = {} <NEW_LINE> self.reg = reg <NEW_LINE> self.params['W1'] = weight_scale * np.random.randn(input_dim, hidden_dim) <NEW_LINE> self.params['b1'] = np.zeros(hidden_dim) <NEW_LINE> self.params['W2'] = weight_scale * np.random.randn(hidden_dim, num_classes) <NEW_LINE> self.params['b2'] = np.zeros(num_classes) <NEW_LINE> <DEDENT> def loss(self, X, y=None): <NEW_LINE> <INDENT> W1, b1 = self.params['W1'], self.params['b1'] <NEW_LINE> W2, b2 = self.params['W2'], self.params['b2'] <NEW_LINE> N = X.shape[0] <NEW_LINE> first_out, first_cache = affine_relu_forward(X, W1, b1) <NEW_LINE> scores, cache = affine_forward(first_out, W2, b2) <NEW_LINE> if y is None: <NEW_LINE> <INDENT> return scores <NEW_LINE> <DEDENT> loss, dout = softmax_loss(scores, y) <NEW_LINE> dfirst_out, dW2, db2 = affine_backward(dout, cache) <NEW_LINE> dX, dW1, db1 = affine_relu_backward(dfirst_out, first_cache) <NEW_LINE> dW1 += self.reg * W1 <NEW_LINE> dW2 += self.reg * W2 <NEW_LINE> loss += 0.5 * self.reg * (np.sum(W1**2) + np.sum(W2**2)) <NEW_LINE> grads = {} <NEW_LINE> grads['W1'], grads['b1'] = dW1, db1 <NEW_LINE> grads['W2'], grads['b2'] = dW2, db2 <NEW_LINE> return loss, grads | A two-layer fully-connected neural network with ReLU nonlinearity and
softmax loss that uses a modular layer design. We assume an input dimension
of D, a hidden dimension of H, and perform classification over C classes.
The architecture should be affine - relu - affine - softmax.
Note that this class does not implement gradient descent; instead, it
will interact with a separate Solver object that is responsible for running
optimization.
The learnable parameters of the model are stored in the dictionary
self.params that maps parameter names to numpy arrays. | 6259905ecc0a2c111447c5f9 |
class DNAChain(PlottableSequence): <NEW_LINE> <INDENT> def __init__(self, genome: str, chain: int = 0): <NEW_LINE> <INDENT> self.basepairs_chain0 = self._makeFromGenome(genome, chain=chain) <NEW_LINE> self.basepairs = self.basepairs_chain0 <NEW_LINE> self.center_in_z() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _makeFromGenome(genome: str, chain: int = 0): <NEW_LINE> <INDENT> dnachain = [] <NEW_LINE> position = np.array([0, 0, 0], dtype=float) <NEW_LINE> rotation = np.array([0, 0, 0], dtype=float) <NEW_LINE> index = 0 <NEW_LINE> for char in genome: <NEW_LINE> <INDENT> dnachain.append( basepair.BasePair( char, chain=chain, position=position, rotation=rotation, index=index ) ) <NEW_LINE> position += np.array([0.0, 0.0, BP_SEPARATION]) <NEW_LINE> rotation += np.array([0.0, 0.0, BP_ROTATION]) <NEW_LINE> index += 1 <NEW_LINE> <DEDENT> return dnachain <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _turnAndTwistChain(chain, twist=0.0): <NEW_LINE> <INDENT> zmax = 0 <NEW_LINE> zmin = 0 <NEW_LINE> for pair in chain: <NEW_LINE> <INDENT> if pair.position[2] < zmin: <NEW_LINE> <INDENT> zmin = pair.position[2] <NEW_LINE> <DEDENT> elif pair.position[2] > zmax: <NEW_LINE> <INDENT> zmax = pair.position[2] <NEW_LINE> <DEDENT> <DEDENT> zrange = zmax - zmin <NEW_LINE> radius = 2.0 * zrange / np.pi <NEW_LINE> for pair in chain: <NEW_LINE> <INDENT> theta = np.pi / 2.0 * (pair.position[2] - zmin) / zrange <NEW_LINE> new_origin = np.array( [radius * (1 - np.cos(theta)), 0.0, radius * np.sin(theta) - radius] ) <NEW_LINE> yang = np.pi / 2.0 * (pair.position[2] - zmin) / zrange <NEW_LINE> pair.rotate(np.array([0, yang, 0]), about_origin=True) <NEW_LINE> xang = twist * (pair.position[2] - zmin) / zrange <NEW_LINE> chain_z_axis = pair.rmatrix[:, 2] <NEW_LINE> rmatrix = r.rot_ax_angle(chain_z_axis, xang) <NEW_LINE> pair.rotate(rmatrix, about_origin=True) <NEW_LINE> pair.translate(new_origin - pair.position) <NEW_LINE> <DEDENT> return chain <NEW_LINE> <DEDENT> def center_in_z(self): <NEW_LINE> <INDENT> minz = 0 <NEW_LINE> maxz = 0 <NEW_LINE> for bp in self.basepairs: <NEW_LINE> <INDENT> for (name, mol) in bp.iterMolecules(): <NEW_LINE> <INDENT> if mol.position[2] < minz: <NEW_LINE> <INDENT> minz = mol.position[2] <NEW_LINE> <DEDENT> elif mol.position[2] > maxz: <NEW_LINE> <INDENT> maxz = mol.position[2] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> ztrans = (minz - maxz) / 2.0 - minz <NEW_LINE> translation = np.array([0.0, 0.0, ztrans]) <NEW_LINE> for bp in self.basepairs: <NEW_LINE> <INDENT> bp.translate(translation) <NEW_LINE> <DEDENT> return None | *Inherits from PlottableSequence*
A single DNA Chain built from a genome specified.
:param genome: A string specifying the genome, e.g. 'AGTATC'
:param chain: The Chain index to label this strand | 6259905e627d3e7fe0e084e0 |
class AlcaHarvest(Builder): <NEW_LINE> <INDENT> def build(self, step, workingDir, **args): <NEW_LINE> <INDENT> stepName = nodeName(step) <NEW_LINE> stepWorkingArea = "%s/%s" % (workingDir, stepName) <NEW_LINE> self.installWorkingArea(step, stepWorkingArea) <NEW_LINE> print("Builders.AlcaHarvest.build called on %s" % stepName) | _AlcaHarvest_
Build a working area for an AlcaHarvest step | 6259905e4e4d562566373a5c
class Command(BaseCommand): <NEW_LINE> <INDENT> def delete(self): <NEW_LINE> <INDENT> write('Deleting Synonyms') <NEW_LINE> for item in Synonym.objects.all(): <NEW_LINE> <INDENT> item.delete() <NEW_LINE> <DEDENT> write('Deleting Lookuplists') <NEW_LINE> for model in lookuplists.lookuplists(): <NEW_LINE> <INDENT> for item in model.objects.all(): <NEW_LINE> <INDENT> item.delete() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def handle(self, *args, **kw): <NEW_LINE> <INDENT> self.delete() | Management command to delete all lookuplists and related
synonyms. | 6259905ecb5e8a47e493ccb0 |
class PictureSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> photo_url = serializers.SerializerMethodField("get_photo_url") <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = User <NEW_LINE> fields = ("photo", "photo_url") <NEW_LINE> <DEDENT> def get_photo_url(self, obj): <NEW_LINE> <INDENT> return obj.photo.url | [summary]
[description]
Extends:
serializers.ModelSerializer
Variables:
photo_url {[type]} -- [description] | 6259905ee64d504609df9ef9 |
class VersionedAttribute(AttributeBase): <NEW_LINE> <INDENT> class_id = 'VA' <NEW_LINE> class_name = 'versioned attribute' <NEW_LINE> class_data_len = 4 <NEW_LINE> sort_type = 'attribute' <NEW_LINE> def __init__(self, parent, name = None, atype = None, value = None, max_versions = 1): <NEW_LINE> <INDENT> super(VersionedAttribute, self).__init__( parent, name, atype, value ) <NEW_LINE> self.max_versions = max_versions <NEW_LINE> self.values = [] <NEW_LINE> if value is not None: <NEW_LINE> <INDENT> self.values = [value] <NEW_LINE> <DEDENT> <DEDENT> def getParentNode(self): <NEW_LINE> <INDENT> parent = self <NEW_LINE> while parent.class_id == 'VA': <NEW_LINE> <INDENT> parent = parent.parent <NEW_LINE> <DEDENT> return parent <NEW_LINE> <DEDENT> def _created(self): <NEW_LINE> <INDENT> self.oid = self.transport.add(self.parent.oid, self.name, self.atype, self._value, self.max_versions) <NEW_LINE> <DEDENT> def _loaded(self, node_data): <NEW_LINE> <INDENT> super(VersionedAttribute, self)._loaded(node_data) <NEW_LINE> self.name = node_data['data'][0] <NEW_LINE> self.atype = node_data['data'][1] <NEW_LINE> self.values = node_data['data'][2] <NEW_LINE> self.max_versions = node_data['data'][3] <NEW_LINE> <DEDENT> def _get_value(self): <NEW_LINE> <INDENT> return self.values[-1] <NEW_LINE> <DEDENT> def _set_value(self, val): <NEW_LINE> <INDENT> self.transport.setValue(self.oid, val) <NEW_LINE> self.values.append(val) <NEW_LINE> if len(self.values) > self.max_versions: <NEW_LINE> <INDENT> self.values.pop(0) <NEW_LINE> <DEDENT> <DEDENT> value = property(_get_value, _set_value) <NEW_LINE> def _copySelf(self, target): <NEW_LINE> <INDENT> return target.add('versioned attribute', self.name, self.atype, self.value, self.max_versions) | A single attribute.
Attributes are used by pretty much every other object type.
They are used to store varying types of data.
Arguments:
name : the attribute name.
atype : the attribute type, one of:
text : a unicode string.
binary : a string of binary data (no encoding/decoding performed)
int : an integer
bool : True/False
value : a value matching the attributes type. | 6259905e0fa83653e46f653c |
class C3(parametertools.SingleParameter): <NEW_LINE> <INDENT> NDIM, TYPE, TIME, SPAN = 0, float, None, (0., .5) | Third coefficient of the muskingum working formula [-]. | 6259905ea79ad1619776b5e8 |
class GitError(Exception): <NEW_LINE> <INDENT> pass | Raised if there is an issue with the local git configuration. | 6259905e67a9b606de5475cc |
class TestAPIError(unittest.TestCase): <NEW_LINE> <INDENT> def test_traceback_is_added(self): <NEW_LINE> <INDENT> exception_message = 'Test exception' <NEW_LINE> try: <NEW_LINE> <INDENT> raise Exception(exception_message) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> raise APIError(404, 'API error') <NEW_LINE> <DEDENT> except APIError as exc: <NEW_LINE> <INDENT> nose.tools.assert_in(exception_message, exc.traceback) | Test APIError functionality
| 6259905e4e4d562566373a5d |
class MyBM25Transformer(BM25Transformer): <NEW_LINE> <INDENT> def fit(self, x, y=None): <NEW_LINE> <INDENT> super().fit(x) | To be used in sklearn pipeline, transformer.fit()
needs to be able to accept a "y" argument | 6259905e99cbb53fe6832536 |
class BoardClear(BoardTest): <NEW_LINE> <INDENT> def test_clear(self): <NEW_LINE> <INDENT> for name, board in self.boards: <NEW_LINE> <INDENT> board.populate(self.test_data) <NEW_LINE> self.assertNotEqual(list(board.iterdata()), [], name) <NEW_LINE> board.clear() <NEW_LINE> expected = [] <NEW_LINE> actual = list(board.iterdata()) <NEW_LINE> self.assertEqual(expected, actual, name) <NEW_LINE> <DEDENT> <DEDENT> def test_clear_offset_board(self): <NEW_LINE> <INDENT> for name, board in self.boards: <NEW_LINE> <INDENT> if any(len(d) == 1 for d in board.dimensions): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> board.populate(self.test_data) <NEW_LINE> offset = tuple(1 for _ in board.dimensions) <NEW_LINE> coord_slices = tuple(slice(o, None) for o in offset) <NEW_LINE> board2 = board[coord_slices] <NEW_LINE> self.assertNotEqual(list(board.iterdata()), [], name) <NEW_LINE> self.assertNotEqual(list(board2.iterdata()), [], name) <NEW_LINE> board2.clear() <NEW_LINE> self.assertNotEqual(list(board.iterdata()), [], name) <NEW_LINE> self.assertEqual(list(board2.iterdata()), [], name) | Clearing the board removes all the data visible to the local board.
That is, if this is a subboard of some larger board, only those items
which fall within the local coordinate space are removed; | 6259905e32920d7e50bc769d |
class User(db.Model, UserMixin): <NEW_LINE> <INDENT> id = Column(Integer, primary_key=True) <NEW_LINE> email = Column(Text, unique=True) <NEW_LINE> password = Column(Text) <NEW_LINE> active = Column(Text) <NEW_LINE> roles = relationship( 'Role', secondary=roles_users, backref=db.backref('users', lazy='dynamic') ) | User of proteomics database. | 6259905e2ae34c7f260ac73d |
class MissingQueryParameter(FilesException): <NEW_LINE> <INDENT> code = 400 <NEW_LINE> description = "Missing required query argument '{arg_name}'" <NEW_LINE> def __init__(self, arg_name, **kwargs): <NEW_LINE> <INDENT> self.arg_name = arg_name <NEW_LINE> super(MissingQueryParameter, self).__init__(**kwargs) <NEW_LINE> <DEDENT> def get_description(self, environ=None): <NEW_LINE> <INDENT> return self.description.format(arg_name=self.arg_name) | Exception raised when missing a query parameter. | 6259905e0c0af96317c5788a |
class HouseStatus(Service): <NEW_LINE> <INDENT> version = (1, 0) <NEW_LINE> serviceType = 'urn:schemas-upnp-org:service:HouseStatus:1' <NEW_LINE> serviceId = 'urn:schemas-upnp-org:serviceId:HouseStatus' <NEW_LINE> serviceUrl = 'house' <NEW_LINE> type = 'House' <NEW_LINE> subscription_timeout_range = (None, None) <NEW_LINE> def __init__(self, xmlfile, client, name='Application'): <NEW_LINE> <INDENT> super(HouseStatus, self).__init__( self.type, self.serviceType, xml=xmlfile, client=client, appname=name) <NEW_LINE> self.log = Logger() <NEW_LINE> self.client = client <NEW_LINE> self.client.houses.append(self) <NEW_LINE> self.occupancystate = 'Indeterminate' <NEW_LINE> self.activitylevel = 'Regular' <NEW_LINE> self.dormancylevel = 'Regular' <NEW_LINE> <DEDENT> def upnp_event(self, evt, var): <NEW_LINE> <INDENT> self.log.debug('away event: %s ==> %s' % (var, evt)) <NEW_LINE> setattr(self, var, evt) | classdocs | 6259905e6e29344779b01ca5 |
class IRecordModifiedEvent(IRecordEvent): <NEW_LINE> <INDENT> oldValue = schema.Field(title=u'The record\'s previous value') <NEW_LINE> newValue = schema.Field(title=u'The record\'s new value') | Event fired when a record's value is modified.
| 6259905e7d43ff2487427f3b |
class CoreFeatureSchema(FeatureSchema): <NEW_LINE> <INDENT> env = fields.Nested(EnvSchema(), default=EnvSchema()) <NEW_LINE> domain = fields.Nested(DomainSchema(), default=DomainSchema()) <NEW_LINE> project = fields.Nested(ProjectSchema(), default=ProjectSchema()) <NEW_LINE> os = fields.String(required=True, default=os.name) <NEW_LINE> path = fields.Nested(PathSchema(), default=PathSchema()) <NEW_LINE> process = fields.Dict(fields.String(), fields.Nested(ProcessSchema()), default={}) <NEW_LINE> configuration = fields.Nested(ConfigurationSchema(), default=ConfigurationSchema()) <NEW_LINE> github_repository = fields.String(required=True, default="inetum-orleans/docker-devbox-ddb") <NEW_LINE> check_updates = fields.Boolean(required=True, default=True) <NEW_LINE> required_version = fields.String(required=False, allow_none=True, default=None) <NEW_LINE> release_asset_name = fields.String(required=False, allow_none=True, default=None) | Core feature schema. | 6259905e3d592f4c4edbc532 |