code (stringlengths 4 to 4.48k) | docstring (stringlengths 1 to 6.45k) | _id (stringlengths 24) |
---|---|---|
class RGBAColorMapper(object): <NEW_LINE> <INDENT> def __init__(self, low, high, palette): <NEW_LINE> <INDENT> self.range = np.linspace(low, high, len(palette)) <NEW_LINE> self.r, self.g, self.b = list(zip(*[hex_to_rgb(i) for i in palette])) <NEW_LINE> <DEDENT> def color(self, data, alpha=255): <NEW_LINE> <INDENT> red = np.interp(data, self.range, self.r) <NEW_LINE> blue = np.interp(data, self.range, self.b) <NEW_LINE> green = np.interp(data, self.range, self.g) <NEW_LINE> red[np.isnan(red)] = 240 <NEW_LINE> blue[np.isnan(blue)] = 240 <NEW_LINE> green[np.isnan(green)] = 240 <NEW_LINE> colors = np.dstack([red.astype(np.uint8), green.astype(np.uint8), blue.astype(np.uint8), np.full_like(data, alpha, dtype=np.uint8)]) <NEW_LINE> return colors.view(dtype=np.uint32).reshape(data.shape) | Maps floating point values to rgb values over a palette
@author: Christine Doig | 6259908166673b3332c31ee3 |
@attrs <NEW_LINE> class Filename: <NEW_LINE> <INDENT> name = attrib() <NEW_LINE> read_flags = attrib(default=Factory(lambda: 'r')) <NEW_LINE> write_flags = attrib(default=Factory(lambda: 'w')) <NEW_LINE> file_like = attrib(default=Factory(bool)) <NEW_LINE> def read(self): <NEW_LINE> <INDENT> if self.file_like: <NEW_LINE> <INDENT> data = self.name.read() <NEW_LINE> self.name.seek(0) <NEW_LINE> return data <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> with open(self.name, self.read_flags) as f: <NEW_LINE> <INDENT> return f.read() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def write(self, data): <NEW_LINE> <INDENT> if self.file_like: <NEW_LINE> <INDENT> return self.name.write(data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> with open(self.name, self.write_flags) as f: <NEW_LINE> <INDENT> return f.write(data) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def exists(self): <NEW_LINE> <INDENT> return self.file_like or ( self.name is not None and os.path.isfile(self.name) ) | A filename instance.
name
The name of the file (or a file-like object).
read_flags
The flags to use when opening the file for reading.
write_flags
The flags used when opening the file for writing.
file_like
A boolean value specifying whether or not name is a file-like object. | 62599081d486a94d0ba2da9b |
class WallAnt(Ant): <NEW_LINE> <INDENT> name = "Wall" <NEW_LINE> food_cost = 4 <NEW_LINE> armor = 4 <NEW_LINE> implemented = True <NEW_LINE> def __init__(self, armor = 4): <NEW_LINE> <INDENT> Insect.__init__(self, armor) | WallAnt does no damage but has high armor | 62599081656771135c48ada2 |
class BadFilename(ReportableException): <NEW_LINE> <INDENT> def __init__(self, filename): <NEW_LINE> <INDENT> super(BadFilename, self).__init__("Invalid filename: %s" % (filename)) | Reports invalid filename | 6259908176e4537e8c3f1063 |
class GDepTotalStatistic(NestingStatisticGraphWise): <NEW_LINE> <INDENT> def _compute(self, g): <NEW_LINE> <INDENT> if g not in self: <NEW_LINE> <INDENT> self[g] = self._compute_nocache(g, set()) <NEW_LINE> <DEDENT> return self[g] <NEW_LINE> <DEDENT> def _compute_nocache(self, g, path): <NEW_LINE> <INDENT> if g in path: <NEW_LINE> <INDENT> return OrderedSet() <NEW_LINE> <DEDENT> all_deps = self.manager.graph_dependencies_prox <NEW_LINE> deps = all_deps[g] <NEW_LINE> parents = OrderedSet() <NEW_LINE> for dep in deps: <NEW_LINE> <INDENT> if isinstance(dep, ParentProxy): <NEW_LINE> <INDENT> parents |= self._compute_nocache(dep.graph, path | {g}) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> parents.add(dep) <NEW_LINE> <DEDENT> <DEDENT> parents.discard(g) <NEW_LINE> return parents | Implements `GraphManager.graph_dependencies_total`. | 62599081ad47b63b2c5a9335 |
@ns_measurement.route('/historical/<string:unique_id>/<string:unit>/<int:channel>/<int:epoch_start>/<int:epoch_end>') <NEW_LINE> @ns_measurement.doc( security='apikey', responses=default_responses, params={ 'unique_id': 'The unique ID of the measurement', 'unit': 'The unit of the measurement', 'channel': 'The channel of the measurement', 'epoch_start': 'The start time, as epoch. Set to 0 for none.', 'epoch_end': 'The end time, as epoch. Set to 0 for none.' } ) <NEW_LINE> class MeasurementsHistorical(Resource): <NEW_LINE> <INDENT> @accept('application/vnd.mycodo.v1+json') <NEW_LINE> @ns_measurement.marshal_with(measurement_list_fields) <NEW_LINE> @flask_login.login_required <NEW_LINE> def get(self, unique_id, unit, channel, epoch_start, epoch_end): <NEW_LINE> <INDENT> if not utils_general.user_has_permission('view_settings'): <NEW_LINE> <INDENT> abort(403) <NEW_LINE> <DEDENT> if unit not in add_custom_units(Unit.query.all()): <NEW_LINE> <INDENT> abort(422, custom='Unit ID not found') <NEW_LINE> <DEDENT> if channel < 0: <NEW_LINE> <INDENT> abort(422, custom='channel must be >= 0') <NEW_LINE> <DEDENT> if epoch_start < 0 or epoch_end < 0: <NEW_LINE> <INDENT> abort(422, custom='epoch_start and epoch_end must be >= 0') <NEW_LINE> <DEDENT> utc_offset_timedelta = datetime.datetime.utcnow() - datetime.datetime.now() <NEW_LINE> if epoch_start: <NEW_LINE> <INDENT> start = datetime.datetime.fromtimestamp(float(epoch_start)) <NEW_LINE> start += utc_offset_timedelta <NEW_LINE> start_str = start.strftime('%Y-%m-%dT%H:%M:%S.%fZ') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> start_str = None <NEW_LINE> <DEDENT> if epoch_end: <NEW_LINE> <INDENT> end = datetime.datetime.fromtimestamp(float(epoch_end)) <NEW_LINE> end += utc_offset_timedelta <NEW_LINE> end_str = end.strftime('%Y-%m-%dT%H:%M:%S.%fZ') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> end_str = None <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> return_ = read_influxdb_list( unique_id, unit, channel, start_str=start_str, end_str=end_str) <NEW_LINE> if return_ and len(return_) > 0: <NEW_LINE> <INDENT> dict_return = {'measurements': []} <NEW_LINE> for each_set in return_: <NEW_LINE> <INDENT> dict_return['measurements'].append( {'time': each_set[0], 'value': each_set[1]}) <NEW_LINE> <DEDENT> return dict_return, 200 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return return_, 200 <NEW_LINE> <DEDENT> <DEDENT> except Exception: <NEW_LINE> <INDENT> abort(500, message='An exception occurred', error=traceback.format_exc()) | Interacts with Measurement settings in the SQL database. | 62599081bf627c535bcb2fb6 |
class _Convertible(object): <NEW_LINE> <INDENT> def __init__(self, enclosing_graph): <NEW_LINE> <INDENT> self._enclosing_graph = enclosing_graph <NEW_LINE> self._outgoing_edges = [] <NEW_LINE> self._converted_self = None <NEW_LINE> <DEDENT> def converted_self(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def convert_variable_to_constant(self, incoming_edge, tensor_data): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def create_edges(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def add_outgoing_edge(self, edge): <NEW_LINE> <INDENT> self._outgoing_edges.append(edge) <NEW_LINE> <DEDENT> @property <NEW_LINE> def converted_enclosing_graph(self): <NEW_LINE> <INDENT> return self._enclosing_graph.converted_self() <NEW_LINE> <DEDENT> @property <NEW_LINE> def outgoing_edges(self): <NEW_LINE> <INDENT> return self._outgoing_edges | An entity that can have variables converted to constants. | 62599081f9cc0f698b1c603e |
class WrongModeError(Exception): <NEW_LINE> <INDENT> pass | Exception in the full url generation process | 625990815fdd1c0f98e5fa64 |
class DevelopmentConfig(BaseConfig): <NEW_LINE> <INDENT> DEBUG = True <NEW_LINE> SQLALCHEMY_DATABASE_URI = 'mysql+mysqldb://jobplus:jobplus@localhost:3306/jobplus?charset=utf8' | Development environment configuration | 6259908144b2445a339b76cf |
class ProjectMapLocationResource(MapLocationResource): <NEW_LINE> <INDENT> class Meta(MapLocationResource.Meta): <NEW_LINE> <INDENT> queryset = ProjectLocation.objects.all() <NEW_LINE> resource_name = 'project_map_location' | A Location resource optimized for use by many-pin maps | 62599081aad79263cf43029f |
class ParameterMapper(object): <NEW_LINE> <INDENT> __metaclass__ = ABCMeta <NEW_LINE> def __init__(self, default_values, mapping_func): <NEW_LINE> <INDENT> self._default_values = default_values <NEW_LINE> self._mapping_func = mapping_func <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def map(self, value): <NEW_LINE> <INDENT> raise NotImplementedError() | Defines an interface for mapping a data parameter to a sonic parameter | 625990817d847024c075dec2 |
class KWSSigner: <NEW_LINE> <INDENT> def __init__(self, secret_key): <NEW_LINE> <INDENT> self.secret_key = ascii_to_hashable(secret_key) <NEW_LINE> <DEDENT> def sign_with_content_md5(self, method, content_md5, content_type, date, request_path): <NEW_LINE> <INDENT> toSign = ascii_to_hashable(self.secret_key)+EOL+EOL <NEW_LINE> toSign += ascii_to_hashable(method)+EOL <NEW_LINE> toSign += ascii_to_hashable(content_md5)+EOL <NEW_LINE> if content_type is not None: <NEW_LINE> <INDENT> toSign += ascii_to_hashable(content_type)+EOL <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> toSign += EOL <NEW_LINE> <DEDENT> toSign += ascii_to_hashable(date)+EOL <NEW_LINE> toSign += ascii_to_hashable(request_path) <NEW_LINE> return compute_sha1_base64(toSign) <NEW_LINE> <DEDENT> def sign_with_content(self, method, content, content_type, date, request_path): <NEW_LINE> <INDENT> content_md5 = compute_md5_hex(to_hashable(content)) <NEW_LINE> return self.sign_with_content_md5(method, content_md5, content_type, date, request_path) <NEW_LINE> <DEDENT> def sign_with_no_content(self, method, content_type, date, request_path): <NEW_LINE> <INDENT> return self.sign_with_content_md5(method, "", content_type, date, request_path) | Class generating KWS request signatures. | 625990815fcc89381b266ecf |
class NinjaAnt(Ant): <NEW_LINE> <INDENT> name = 'Ninja' <NEW_LINE> damage = 1 <NEW_LINE> food_cost = 5 <NEW_LINE> armor = 1 <NEW_LINE> blocks_path = False <NEW_LINE> implemented = True <NEW_LINE> def action(self, colony): <NEW_LINE> <INDENT> bee_list = [b for b in self.place.bees] <NEW_LINE> for i in bee_list: <NEW_LINE> <INDENT> i.reduce_armor(self.damage) | NinjaAnt does not block the path and damages all bees in its place. | 6259908126068e7796d4e426 |
class test_gettempdir(TC): <NEW_LINE> <INDENT> def test_directory_exists(self) -> None: <NEW_LINE> <INDENT> dir = tempfile.gettempdir() <NEW_LINE> self.assertTrue(os.path.isabs(dir) or dir == os.curdir, "%s is not an absolute path" % dir) <NEW_LINE> self.assertTrue(os.path.isdir(dir), "%s is not a directory" % dir) <NEW_LINE> <DEDENT> def test_directory_writable(self) -> None: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> file = tempfile.NamedTemporaryFile() <NEW_LINE> file.write(b"blat") <NEW_LINE> file.close() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.failOnException("create file in %s" % tempfile.gettempdir()) <NEW_LINE> <DEDENT> <DEDENT> def test_same_thing(self) -> None: <NEW_LINE> <INDENT> a = tempfile.gettempdir() <NEW_LINE> b = tempfile.gettempdir() <NEW_LINE> self.assertTrue(a is b) | Test gettempdir(). | 625990812c8b7c6e89bd52cb |
class OperatingHours(models.Model): <NEW_LINE> <INDENT> day = models.IntegerField(choices = DAYS, db_index = True) <NEW_LINE> opens = models.TimeField() <NEW_LINE> closes = models.TimeField() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> abstract = True | Abstract class for a place's operating hours | 625990815166f23b2e244ebe |
class SilentZEOServer(ZEOServer): <NEW_LINE> <INDENT> def setup_default_logging(self): <NEW_LINE> <INDENT> pass | A ZEO Server that doesn't write to the console | 62599081a8370b77170f1eb7 |
class TagViewSet(BaseRacipeAttrViewSet): <NEW_LINE> <INDENT> queryset = Tag.objects.all() <NEW_LINE> serializer_class = serializers.TagSerializer | Manage tags in the database | 6259908166673b3332c31ee5 |
class VTGateClient(object): <NEW_LINE> <INDENT> def __init__(self, addr, timeout, *pargs, **kwargs): <NEW_LINE> <INDENT> super(VTGateClient, self).__init__(*pargs, **kwargs) <NEW_LINE> self.addr = addr <NEW_LINE> self.timeout = timeout <NEW_LINE> self.session = None <NEW_LINE> <DEDENT> def dial(self): <NEW_LINE> <INDENT> raise NotImplementedError('Child class needs to implement this') <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> raise NotImplementedError('Child class needs to implement this') <NEW_LINE> <DEDENT> def is_closed(self): <NEW_LINE> <INDENT> raise NotImplementedError('Child class needs to implement this') <NEW_LINE> <DEDENT> def cursor(self, *pargs, **kwargs): <NEW_LINE> <INDENT> cursorclass = kwargs.pop('cursorclass', None) or vtgate_cursor.VTGateCursor <NEW_LINE> return cursorclass(self, *pargs, **kwargs) <NEW_LINE> <DEDENT> def begin(self, effective_caller_id=None, single_db=False): <NEW_LINE> <INDENT> raise NotImplementedError('Child class needs to implement this') <NEW_LINE> <DEDENT> def commit(self, twopc=False): <NEW_LINE> <INDENT> raise NotImplementedError('Child class needs to implement this') <NEW_LINE> <DEDENT> def rollback(self): <NEW_LINE> <INDENT> raise NotImplementedError('Child class needs to implement this') <NEW_LINE> <DEDENT> def _execute(self, sql, bind_variables, tablet_type, keyspace_name=None, shards=None, keyspace_ids=None, keyranges=None, entity_keyspace_id_map=None, entity_column_name=None, not_in_transaction=False, effective_caller_id=None, include_event_token=False, compare_event_token=None, **kwargs): <NEW_LINE> <INDENT> raise NotImplementedError('Child class needs to implement this') <NEW_LINE> <DEDENT> def _execute_batch( self, sql_list, bind_variables_list, tablet_type, keyspace_list=None, shards_list=None, keyspace_ids_list=None, as_transaction=False, effective_caller_id=None, **kwargs): <NEW_LINE> <INDENT> raise NotImplementedError('Child class needs to implement this') <NEW_LINE> <DEDENT> def _stream_execute( self, sql, bind_variables, tablet_type, keyspace=None, shards=None, keyspace_ids=None, keyranges=None, effective_caller_id=None, **kwargs): <NEW_LINE> <INDENT> raise NotImplementedError('Child class needs to implement this') <NEW_LINE> <DEDENT> def get_srv_keyspace(self, keyspace): <NEW_LINE> <INDENT> raise NotImplementedError('Child class needs to implement this') <NEW_LINE> <DEDENT> def update_stream(self, keyspace_name, tablet_type, timestamp=None, event=None, shard=None, key_range=None, effective_caller_id=None, **kwargs): <NEW_LINE> <INDENT> raise NotImplementedError('Child class needs to implement this') <NEW_LINE> <DEDENT> def message_stream(self, keyspace, name, shard=None, key_range=None, effective_caller_id=None, **kwargs): <NEW_LINE> <INDENT> raise NotImplementedError('Child class needs to implement this') <NEW_LINE> <DEDENT> def message_ack(self, name, ids, keyspace=None, effective_caller_id=None, **kwargs): <NEW_LINE> <INDENT> raise NotImplementedError('Child class needs to implement this') | VTGateClient is the interface for the vtgate client implementations.
All implementations must implement all these methods.
If something goes wrong with the connection, this object will be thrown out.
FIXME(alainjobart) transactional state (the Session object) is currently
maintained by this object. It should be maintained by the cursor, and just
returned / passed in with every method that makes sense. | 6259908123849d37ff852b9f |
class Key(object): <NEW_LINE> <INDENT> def __init__(self, key_type, pub=None, priv=None, key=None): <NEW_LINE> <INDENT> self.key_type = key_type <NEW_LINE> if pub or priv: <NEW_LINE> <INDENT> if pub is not None and len(pub) != key_type.pubkey_len: <NEW_LINE> <INDENT> raise ValueError('pub key material is wrong length: %d instead of %d' % (len(pub), key_type.pubkey_len)) <NEW_LINE> <DEDENT> if priv is not None and len(priv) != key_type.privkey_len: <NEW_LINE> <INDENT> raise ValueError('priv key material is wrong length: %d instead of %d' % (len(priv), key_type.privkey_len)) <NEW_LINE> <DEDENT> key = self._parse(pub, priv) <NEW_LINE> <DEDENT> elif key is None: <NEW_LINE> <INDENT> key = self._generate() <NEW_LINE> <DEDENT> if isinstance(key, bytes): <NEW_LINE> <INDENT> raise TypeError('Pass in key material with the pub= and priv= kwargs') <NEW_LINE> <DEDENT> self.key = key <NEW_LINE> <DEDENT> def has_private(self): <NEW_LINE> <INDENT> return self._has_private() <NEW_LINE> <DEDENT> def to_public(self): <NEW_LINE> <INDENT> if self.has_private(): <NEW_LINE> <INDENT> return self._to_public() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> <DEDENT> def get_pubkey(self): <NEW_LINE> <INDENT> return self._get_pubkey() <NEW_LINE> <DEDENT> def get_privkey(self): <NEW_LINE> <INDENT> if not self.has_private(): <NEW_LINE> <INDENT> raise TypeError('Private key not available in this object') <NEW_LINE> <DEDENT> return self._get_privkey() | Base class for keys. | 62599081bf627c535bcb2fb8 |
class TwitchIRC(object): <NEW_LINE> <INDENT> def __init__(self, channels=[]): <NEW_LINE> <INDENT> self.channels = channels <NEW_LINE> self._con = None <NEW_LINE> self._data = "" <NEW_LINE> <DEDENT> def login(self): <NEW_LINE> <INDENT> self._connect_socket() <NEW_LINE> self._con.send(bytes('PASS %s\r\n' % PASS, 'UTF-8')) <NEW_LINE> self._con.send(bytes('NICK %s\r\n' % NICK, 'UTF-8')) <NEW_LINE> for chan in self.channels: <NEW_LINE> <INDENT> self._con.send(bytes('JOIN %s\r\n' % chan, 'UTF-8')) <NEW_LINE> <DEDENT> <DEDENT> def get_message(self): <NEW_LINE> <INDENT> new_data = self._con.recv(1024).decode('utf-8', 'ignore') <NEW_LINE> self._data = self._data + new_data <NEW_LINE> data_split = re.split(r"[~\r\n]+", self._data) <NEW_LINE> self._data = data_split.pop() <NEW_LINE> for line in data_split: <NEW_LINE> <INDENT> line = str.rstrip(line) <NEW_LINE> line = str.split(line) <NEW_LINE> if len(line) >= 1: <NEW_LINE> <INDENT> if line[0] == 'PING': <NEW_LINE> <INDENT> self._send_pong(line[1]) <NEW_LINE> return self.get_message() <NEW_LINE> <DEDENT> if line[1] == 'PRIVMSG': <NEW_LINE> <INDENT> sender = self._get_sender(line[0]) <NEW_LINE> message = self._get_message(line) <NEW_LINE> return sender, message <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def _connect_socket(self): <NEW_LINE> <INDENT> con = socket.socket() <NEW_LINE> con.connect((HOST, PORT)) <NEW_LINE> self._con = con <NEW_LINE> <DEDENT> def _send_pong(self, msg): <NEW_LINE> <INDENT> self._con.send(bytes('PONG %s\r\n' % msg, 'UTF-8')) <NEW_LINE> <DEDENT> def _join_channel(self, chan): <NEW_LINE> <INDENT> self._con.send(bytes('JOIN %s\r\n' % chan, 'UTF-8')) <NEW_LINE> <DEDENT> def _get_sender(self, msg): <NEW_LINE> <INDENT> result = "" <NEW_LINE> for char in msg: <NEW_LINE> <INDENT> if char == "!": <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if char != ":": <NEW_LINE> <INDENT> result += char <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def _get_message(self, msg): <NEW_LINE> <INDENT> result = "" <NEW_LINE> i = 3 <NEW_LINE> length = len(msg) <NEW_LINE> while i < length: <NEW_LINE> <INDENT> result += msg[i] + " " <NEW_LINE> i += 1 <NEW_LINE> <DEDENT> result = result.lstrip(':') <NEW_LINE> return result | Client for reading messages from twitch.tv chat. Connects to the chat
server, joins some number of channels, and returns the next message
from all the channels each time get_message is called. | 625990827047854f46340e9b |
class PlaylistView(PlaylistMixin, generics.RetrieveUpdateDestroyAPIView): <NEW_LINE> <INDENT> serializer_class = serializers.PlaylistDetailSerializer <NEW_LINE> def get_object(self): <NEW_LINE> <INDENT> obj = super().get_object() <NEW_LINE> obj.media = obj.ordered_media_item_queryset.viewable_by_user(self.request.user) <NEW_LINE> return obj | Endpoint to retrieve an individual playlist. | 625990828a349b6b43687d44 |
class TestDefaults(object): <NEW_LINE> <INDENT> def test_staticfiles_dirs(self): <NEW_LINE> <INDENT> self.assertFileContains('test.txt', 'Can we find') <NEW_LINE> self.assertFileContains(os.path.join('prefix', 'test.txt'), 'Prefix') <NEW_LINE> <DEDENT> def test_staticfiles_dirs_subdir(self): <NEW_LINE> <INDENT> self.assertFileContains('subdir/test.txt', 'Can we find') <NEW_LINE> <DEDENT> def test_staticfiles_dirs_priority(self): <NEW_LINE> <INDENT> self.assertFileContains('test/file.txt', 'STATICFILES_DIRS') <NEW_LINE> <DEDENT> def test_app_files(self): <NEW_LINE> <INDENT> self.assertFileContains('test/file1.txt', 'file1 in the app dir') <NEW_LINE> <DEDENT> def test_nonascii_filenames(self): <NEW_LINE> <INDENT> self.assertFileContains(u'test/fişier.txt', u'fişier in the app dir') <NEW_LINE> <DEDENT> def test_camelcase_filenames(self): <NEW_LINE> <INDENT> self.assertFileContains(u'test/camelCase.txt', u'camelCase') | A few standard test cases. | 62599082283ffb24f3cf5388 |
class DevelopmentConfig(BaseConfig): <NEW_LINE> <INDENT> DEBUG = True <NEW_LINE> BCRYPT_LOG_ROUNDS = 4 <NEW_LINE> SQLALCHEMY_DATABASE_URI = sqlite_local_base + database_name | Development configuration. | 62599082a8370b77170f1eb8 |
class ModulesToInstallTests(TestCase): <NEW_LINE> <INDENT> def test_notexist(self): <NEW_LINE> <INDENT> root = os.path.dirname(os.path.dirname(twisted.__file__)) <NEW_LINE> for module in notPortedModules: <NEW_LINE> <INDENT> segments = module.split(".") <NEW_LINE> segments[-1] += ".py" <NEW_LINE> path = os.path.join(root, *segments) <NEW_LINE> alternateSegments = module.split(".") + ["__init__.py"] <NEW_LINE> packagePath = os.path.join(root, *alternateSegments) <NEW_LINE> self.assertFalse(os.path.exists(path) or os.path.exists(packagePath), "Module {0} exists".format(module)) | Tests for L{notPortedModules}. | 62599082091ae35668706727 |
class NewsCrawl(object): <NEW_LINE> <INDENT> URL_BASE = 'https://www.google.com/search' <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.payload = { 'tbm': 'nws', } <NEW_LINE> self.url = self.URL_BASE <NEW_LINE> <DEDENT> def crawl_term(self, q): <NEW_LINE> <INDENT> if not q: <NEW_LINE> <INDENT> return json.dumps([]) <NEW_LINE> <DEDENT> self.payload['q'] = q <NEW_LINE> r = requests.get(self.url, params=self.payload) <NEW_LINE> html = r.text <NEW_LINE> soup = bs4.BeautifulSoup(html) <NEW_LINE> titles = soup.find_all('a') <NEW_LINE> ans = [] <NEW_LINE> for t in titles: <NEW_LINE> <INDENT> text = t.text <NEW_LINE> href = t.get('href').split('=') <NEW_LINE> if text.lower().find(q) != -1: <NEW_LINE> <INDENT> if len(href) > 1: <NEW_LINE> <INDENT> data = { 'title': text, 'url': href[1], } <NEW_LINE> ans.append(data) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return json.dumps(ans) | Class used for making news requests. | 625990827cff6e4e811b7529 |
class MyCar: <NEW_LINE> <INDENT> def __init__(self, x, y): <NEW_LINE> <INDENT> self.x = x <NEW_LINE> self.y = y <NEW_LINE> <DEDENT> def move(self, roads_amount, direction): <NEW_LINE> <INDENT> if direction == 0 and self.x > common.MARGIN + constants.ROAD_WIDTH / 2: <NEW_LINE> <INDENT> self.x -= constants.ROAD_WIDTH <NEW_LINE> <DEDENT> elif direction == 2 and self.x < roads_amount * constants.ROAD_WIDTH + common.MARGIN - constants.ROAD_WIDTH / 2: <NEW_LINE> <INDENT> self.x += constants.ROAD_WIDTH | An instance of this class becomes the player's car when the game starts | 6259908266673b3332c31ee7 |
class _m_Home: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.dt_now = str(datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S")) <NEW_LINE> self.to_return = { "_time": self.dt_now, "_msg": "Welcome to PROJECTNAMEFSKLTN !" } <NEW_LINE> <DEDENT> def get_home(self): <NEW_LINE> <INDENT> self.to_return["_function"] = self.get_home.__name__ <NEW_LINE> return self.to_return | Create index | 625990823346ee7daa3383d6 |
class ThreadedFTPServer(threading.Thread): <NEW_LINE> <INDENT> def __init__(self, server): <NEW_LINE> <INDENT> threading.Thread.__init__(self) <NEW_LINE> self.server = server <NEW_LINE> self.server.handler._auth_failed_timeout = 0.1 <NEW_LINE> self.host, self.port = self.server.socket.getsockname()[:2] <NEW_LINE> self.daemon = True <NEW_LINE> self.running = True <NEW_LINE> self.start() <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> while self.running: <NEW_LINE> <INDENT> self.server.serve_forever(timeout=0.001, blocking=False) <NEW_LINE> <DEDENT> self.server.close_all() <NEW_LINE> <DEDENT> def stop(self): <NEW_LINE> <INDENT> self.running = False <NEW_LINE> self.join() | Threaded FTP server for running unit tests. | 62599082fff4ab517ebcf2fe |
class ProfileNotFound(Exception): <NEW_LINE> <INDENT> pass | The profile you named does not exist in $HOME/.getawscreds | 62599082656771135c48ada4 |
class OrderCancelTransaction(BaseEntity): <NEW_LINE> <INDENT> _summary_format = "Cancel Order {orderID}" <NEW_LINE> _name_format = "Transaction {id}" <NEW_LINE> _properties = spec_properties.transaction_OrderCancelTransaction <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(OrderCancelTransaction, self).__init__() <NEW_LINE> self.id = kwargs.get("id") <NEW_LINE> self.time = kwargs.get("time") <NEW_LINE> self.userID = kwargs.get("userID") <NEW_LINE> self.accountID = kwargs.get("accountID") <NEW_LINE> self.batchID = kwargs.get("batchID") <NEW_LINE> self.requestID = kwargs.get("requestID") <NEW_LINE> self.type = kwargs.get("type", "ORDER_CANCEL") <NEW_LINE> self.orderID = kwargs.get("orderID") <NEW_LINE> self.clientOrderID = kwargs.get("clientOrderID") <NEW_LINE> self.reason = kwargs.get("reason") <NEW_LINE> self.replacedByOrderID = kwargs.get("replacedByOrderID") <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def from_dict(data, ctx): <NEW_LINE> <INDENT> data = data.copy() <NEW_LINE> return OrderCancelTransaction(**data) | An OrderCancelTransaction represents the cancellation of an Order in the
client's Account. | 6259908276e4537e8c3f1067 |
class Handicaps: <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> handicaps = kwargs.get("handicaps", [0, 0]) or [0, 0] <NEW_LINE> self.home = handicaps[0] <NEW_LINE> self.away = handicaps[1] <NEW_LINE> self.home_score_float = float(self.away) if float(self.away) >= 0 else 0 <NEW_LINE> self.away_score_float = float(self.home) if float(self.home) >= 0 else 0 <NEW_LINE> self.home_score_int = int(self.away) if int(self.away) >= 0 else 0 <NEW_LINE> self.away_score_int = int(self.home) if int(self.home) >= 0 else 0 <NEW_LINE> if kwargs.get("handicap_allow_float", True): <NEW_LINE> <INDENT> self.home_score = self.home_score_float <NEW_LINE> self.away_score = self.away_score_float <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.home_score = self.home_score_int <NEW_LINE> self.away_score = self.away_score_int | Defines a few variables to be used in conjunction with {handicaps.X}
| 625990823617ad0b5ee07c38 |
class aIMAGE_SECTION_HEADER: <NEW_LINE> <INDENT> def __init__(self, section_num, r, ptr): <NEW_LINE> <INDENT> self.array = (IMAGE_SECTION_HEADER * section_num)() <NEW_LINE> self.section_num = section_num <NEW_LINE> self.section_table = ptr <NEW_LINE> for i in range(section_num): <NEW_LINE> <INDENT> self.array[i].sinit(r, ptr) <NEW_LINE> ptr += sizeof(IMAGE_SECTION_HEADER) <NEW_LINE> <DEDENT> <DEDENT> def info(self): <NEW_LINE> <INDENT> print('{:=^60}'.format('SectionTable: {}'.format(self.section_num))) <NEW_LINE> print('Section Table start from: 0x{:08x}'.format(self.section_table)) <NEW_LINE> for i in range(self.section_num): <NEW_LINE> <INDENT> self.array[i].info(i) | aIMAGE_SECTION_HEADER:
This class is a wrapper around an array of IMAGE_SECTION_HEADER.
e.g.
array_ish = aIMAGE_SECTION_HEADER(image_file_header.NumberOfSections, data, section_table_ptr) | 625990824c3428357761bda3 |
class setUnion_result: <NEW_LINE> <INDENT> thrift_spec = ( (0, TType.LIST, 'success', (TType.STRING,None), None, ), ) <NEW_LINE> def __init__(self, success=None,): <NEW_LINE> <INDENT> self.success = success <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 0: <NEW_LINE> <INDENT> if ftype == TType.LIST: <NEW_LINE> <INDENT> self.success = [] <NEW_LINE> (_etype10, _size7) = iprot.readListBegin() <NEW_LINE> for _i11 in xrange(_size7): <NEW_LINE> <INDENT> _elem12 = iprot.readString().decode('utf-8') <NEW_LINE> self.success.append(_elem12) <NEW_LINE> <DEDENT> iprot.readListEnd() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('setUnion_result') <NEW_LINE> if self.success is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('success', TType.LIST, 0) <NEW_LINE> oprot.writeListBegin(TType.STRING, len(self.success)) <NEW_LINE> for iter13 in self.success: <NEW_LINE> <INDENT> oprot.writeString(iter13.encode('utf-8')) <NEW_LINE> <DEDENT> oprot.writeListEnd() <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- success | 6259908244b2445a339b76d1 |
class Sensors(SortedNameList[Sensor._Type]): <NEW_LINE> <INDENT> _T = TypeVar("_T", bound="Sensors") <NEW_LINE> def _loads(self, contents: List[Dict[str, Any]]) -> None: <NEW_LINE> <INDENT> self._data = [] <NEW_LINE> self._names = [] <NEW_LINE> for sensor_info in contents: <NEW_LINE> <INDENT> self.add(Sensor.loads(sensor_info)) <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def loads(cls: Type[_T], contents: List[Dict[str, Any]]) -> _T: <NEW_LINE> <INDENT> return common_loads(cls, contents) <NEW_LINE> <DEDENT> def dumps(self) -> List[Dict[str, Any]]: <NEW_LINE> <INDENT> return [sensor.dumps() for sensor in self._data] | This class represents all sensors in a :class:`~tensorbay.dataset.segment.FusionSegment`. | 625990824527f215b58eb714 |
class IconScoreContextFactory(object): <NEW_LINE> <INDENT> def __init__(self, max_size: int) -> None: <NEW_LINE> <INDENT> self._lock = threading.Lock() <NEW_LINE> self._queue = [] <NEW_LINE> self._max_size = max_size <NEW_LINE> <DEDENT> def create(self, context_type: 'IconScoreContextType') -> 'IconScoreContext': <NEW_LINE> <INDENT> with self._lock: <NEW_LINE> <INDENT> if len(self._queue) > 0: <NEW_LINE> <INDENT> context = self._queue.pop() <NEW_LINE> context.type = context_type <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> context = IconScoreContext(context_type) <NEW_LINE> <DEDENT> <DEDENT> return context <NEW_LINE> <DEDENT> def destroy(self, context: 'IconScoreContext') -> None: <NEW_LINE> <INDENT> with self._lock: <NEW_LINE> <INDENT> if len(self._queue) < self._max_size: <NEW_LINE> <INDENT> context.clear() <NEW_LINE> self._queue.append(context) | IconScoreContextFactory
| 62599082796e427e53850263 |
class COCOSeg(BaseDataset): <NEW_LINE> <INDENT> def __init__(self, base_dir, split, transforms=None, to_tensor=None): <NEW_LINE> <INDENT> super(COCOSeg).__init__(base_dir) <NEW_LINE> self.split = split + '2014' <NEW_LINE> annFile = '{}/annotations/instances_{}.json'.format(base_dir, self.split) <NEW_LINE> self.coco = COCO(annFile) <NEW_LINE> self.ids = self.coco.getImgIds() <NEW_LINE> self.transforms = transforms <NEW_LINE> self.to_tensor = to_tensor <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.ids) <NEW_LINE> <DEDENT> def __getitem__(self, idx): <NEW_LINE> <INDENT> id_ = self.ids[idx] <NEW_LINE> img_meta = self.coco.loadImgs(id_)[0] <NEW_LINE> annIds = self.coco.getAnnIds(imgIds=img_meta['id']) <NEW_LINE> image = Image.open("{}/{}/{}".format(self._base_dir, self.split, img_meta['file_name'])) <NEW_LINE> if image.mode == 'L': <NEW_LINE> <INDENT> image = image.convert('RGB') <NEW_LINE> <DEDENT> anns = self.coco.loadAnns(annIds) <NEW_LINE> semantic_masks = {} <NEW_LINE> for ann in anns: <NEW_LINE> <INDENT> catId = ann['category_id'] <NEW_LINE> mask = self.coco.annToMask(ann) <NEW_LINE> if catId in semantic_masks: <NEW_LINE> <INDENT> semantic_masks[catId][mask == 1] = catId <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> semantic_mask = np.zeros((img_meta['height'], img_meta['width']), dtype='uint8') <NEW_LINE> semantic_mask[mask == 1] = catId <NEW_LINE> semantic_masks[catId] = semantic_mask <NEW_LINE> <DEDENT> <DEDENT> semantic_masks = {catId: Image.fromarray(semantic_mask) for catId, semantic_mask in semantic_masks.items()} <NEW_LINE> instance_mask = Image.fromarray(np.zeros_like(semantic_mask, dtype='uint8')) <NEW_LINE> scribble_mask = Image.fromarray(np.zeros_like(semantic_mask, dtype='uint8')) <NEW_LINE> sample = {'image': image, 'label': semantic_masks, 'inst': instance_mask, 'scribble': scribble_mask} <NEW_LINE> if self.transforms is not None: <NEW_LINE> <INDENT> sample = self.transforms(sample) <NEW_LINE> <DEDENT> image_t = torch.from_numpy(np.array(sample['image']).transpose(2, 0, 1)) <NEW_LINE> if self.to_tensor is not None: <NEW_LINE> <INDENT> sample = self.to_tensor(sample) <NEW_LINE> <DEDENT> sample['id'] = id_ <NEW_LINE> sample['image_t'] = image_t <NEW_LINE> for key_prefix in self.aux_attrib: <NEW_LINE> <INDENT> aux_attrib_val = self.aux_attrib[key_prefix](sample, **self.aux_attrib_args[key_prefix]) <NEW_LINE> for key_suffix in aux_attrib_val: <NEW_LINE> <INDENT> sample[key_prefix + '_' + key_suffix] = aux_attrib_val[key_suffix] <NEW_LINE> <DEDENT> <DEDENT> return sample | Modified Class for COCO Dataset
Args:
base_dir:
COCO dataset directory
split:
which split to use (default is 2014 version)
choose from ('train', 'val')
transform:
transformations to be performed on images/masks
to_tensor:
transformation to convert PIL Image to tensor | 62599082aad79263cf4302a4 |
class GraphNetwork(_base.AbstractModule): <NEW_LINE> <INDENT> def __init__(self, edge_model_fn, node_model_fn, global_model_fn, reducer=tf.math.unsorted_segment_sum, edge_block_opt=None, node_block_opt=None, global_block_opt=None, name="graph_network"): <NEW_LINE> <INDENT> super(GraphNetwork, self).__init__(name=name) <NEW_LINE> edge_block_opt = _make_default_edge_block_opt(edge_block_opt) <NEW_LINE> node_block_opt = _make_default_node_block_opt(node_block_opt, reducer) <NEW_LINE> global_block_opt = _make_default_global_block_opt(global_block_opt, reducer) <NEW_LINE> with self._enter_variable_scope(): <NEW_LINE> <INDENT> self._edge_block = blocks.EdgeBlock( edge_model_fn=edge_model_fn, **edge_block_opt) <NEW_LINE> self._node_block = blocks.NodeBlock( node_model_fn=node_model_fn, **node_block_opt) <NEW_LINE> self._global_block = blocks.GlobalBlock( global_model_fn=global_model_fn, **global_block_opt) <NEW_LINE> <DEDENT> <DEDENT> def _build(self, graph): <NEW_LINE> <INDENT> return self._global_block(self._node_block(self._edge_block(graph))) | Implementation of a Graph Network.
See https://arxiv.org/abs/1806.01261 for more details. | 62599082d8ef3951e32c8bd4 |
class MatplotlibWidget(Canvas): <NEW_LINE> <INDENT> def __init__(self, parent=None, title='',suptitle='', xlabel='', ylabel='', xlim=None, ylim=None, xscale='linear', yscale='linear', width=6.5, height= 5.5, dpi=40): <NEW_LINE> <INDENT> self.figure = Figure(figsize=(width, height), dpi=dpi) <NEW_LINE> self.ax = self.figure.add_subplot(111) <NEW_LINE> self.ax.set_title(title) <NEW_LINE> self.figure.suptitle(suptitle) <NEW_LINE> self.ax.set_xlabel(xlabel) <NEW_LINE> self.ax.set_ylabel(ylabel) <NEW_LINE> if xscale is not None: <NEW_LINE> <INDENT> self.ax.set_xscale(xscale) <NEW_LINE> <DEDENT> if yscale is not None: <NEW_LINE> <INDENT> self.ax.set_yscale(yscale) <NEW_LINE> <DEDENT> if xlim is not None: <NEW_LINE> <INDENT> self.ax.set_xlim(*xlim) <NEW_LINE> <DEDENT> if ylim is not None: <NEW_LINE> <INDENT> self.ax.set_ylim(*ylim) <NEW_LINE> <DEDENT> Canvas.__init__(self, self.figure) <NEW_LINE> self.setParent(parent) <NEW_LINE> Canvas.setSizePolicy(self, QSizePolicy.Expanding, QSizePolicy.Expanding) <NEW_LINE> Canvas.updateGeometry(self) <NEW_LINE> <DEDENT> def sizeHint(self): <NEW_LINE> <INDENT> w, h = self.get_width_height() <NEW_LINE> return QSize(w, h) <NEW_LINE> <DEDENT> def minimumSizeHint(self): <NEW_LINE> <INDENT> return QSize(10, 10) | MatplotlibWidget inherits PyQt4.QWidget
and matplotlib.backend_bases.FigureCanvasBase | 625990827d847024c075dec6 |
class Status(dbmodels.Model): <NEW_LINE> <INDENT> status_idx = dbmodels.AutoField(primary_key=True) <NEW_LINE> word = dbmodels.CharField(max_length=30) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> db_table = 'tko_status' | The possible results of a test
These objects are populated automatically from a
:ref:`fixture file <django:initial-data-via-fixtures>` | 62599082283ffb24f3cf538a |
class Reader(object): <NEW_LINE> <INDENT> def __init__(self, istream): <NEW_LINE> <INDENT> self.istream = istream | Base class for all readers. | 62599082f548e778e596d07c |
class PelicanArticle(PelicanContentFile): <NEW_LINE> <INDENT> encoding = 'utf-8' <NEW_LINE> extension = NotImplemented <NEW_LINE> re_metadata = NotImplemented <NEW_LINE> def _load(self, file_path): <NEW_LINE> <INDENT> content = super(PelicanArticle, self)._load(file_path) <NEW_LINE> if content[0] == codecs.BOM_UTF8.decode(self.encoding): <NEW_LINE> <INDENT> content = content[1:] <NEW_LINE> <DEDENT> return content <NEW_LINE> <DEDENT> def get_path_metadata(self, settings): <NEW_LINE> <INDENT> return parse_path_metadata(self.filename, settings=settings) <NEW_LINE> <DEDENT> def _parse_metadata(self, metadata, line): <NEW_LINE> <INDENT> found = self.re_metadata.match(line) <NEW_LINE> if found: <NEW_LINE> <INDENT> metadata[found.group(1).lower()] = found.group(2) <NEW_LINE> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def get_text_metadata(self, text): <NEW_LINE> <INDENT> metadata = {} <NEW_LINE> new_text = '\n'.join(line for line in text.splitlines() if not self._parse_metadata(metadata, line)) <NEW_LINE> return new_text, metadata <NEW_LINE> <DEDENT> def _compose(self, title, text, metadata): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def compose(self, title, text, metadata): <NEW_LINE> <INDENT> self.content = self._compose(title, text, metadata) <NEW_LINE> return self.content <NEW_LINE> <DEDENT> def internal_link(self, text, uri): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def image(self, alt, uri): <NEW_LINE> <INDENT> raise NotImplementedError | Base class for article formats.
The content should always be a unicode str. | 62599082adb09d7d5dc0c043 |
class TestValidatePortNumber(unittest.TestCase): <NEW_LINE> <INDENT> def testPortNumberInt(self): <NEW_LINE> <INDENT> self.assertRaises(ValueError, lambda: netutils.ValidatePortNumber(500000)) <NEW_LINE> self.assertEqual(netutils.ValidatePortNumber(5000), 5000) <NEW_LINE> <DEDENT> def testPortNumberStr(self): <NEW_LINE> <INDENT> self.assertRaises(ValueError, lambda: netutils.ValidatePortNumber("pinky bunny")) <NEW_LINE> self.assertEqual(netutils.ValidatePortNumber("5000"), 5000) | Test netutils.ValidatePortNumber | 625990824c3428357761bda5 |
class CompilationUnit: <NEW_LINE> <INDENT> def __init__(self, source): <NEW_LINE> <INDENT> self.source = source <NEW_LINE> self.tree = NoTree <NEW_LINE> self.newNames = newNames.NewName() <NEW_LINE> self.liveRanges = None <NEW_LINE> self.mCode = None | Would usually represent one source file, or similar.
Wraps the source file with the resulting tree. As the tree is transformed, maintains the connection with source data and error reporting. | 6259908299fddb7c1ca63b4f |
class TransformerInfo(IdentifiedObject): <NEW_LINE> <INDENT> pass <NEW_LINE> pass | Set of transformer data, from an equipment library.
| 625990827d847024c075dec8 |
class Staff(Person): <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> initializer(self, Staff, name) | Staff Class
This is one of the Child Classes that inherits from the Person
Super Class. It passes information to the Parent Constructor
so as to dictate how the Person is created. It also handles
other responsibilities related to a Staff. | 62599082be7bc26dc9252bcb |
class City(db.Model): <NEW_LINE> <INDENT> __tablename__ = "city_info" <NEW_LINE> citycode = db.Column(db.Integer,primary_key=True) <NEW_LINE> provincecode = db.Column(db.Integer,nullable=False) <NEW_LINE> namecn = db.Column(db.String(20),nullable=False) | City | 625990825fcc89381b266ed2 |
class MenuAction: <NEW_LINE> <INDENT> def __init__(self, name, callback, data=None): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.data = data <NEW_LINE> self.callback = callback <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def execute(self): <NEW_LINE> <INDENT> self.callback(self.data) | Action that makes up a textual menu,
defined by a name, a callback (executed if the action is selected)
and a custom data item (e.g. the id of a table) | 625990822c8b7c6e89bd52d1 |
class PayLine(Wizard): <NEW_LINE> <INDENT> __name__ = 'account.move.line.pay' <NEW_LINE> start = StateView('account.move.line.pay.start', 'account_payment.move_line_pay_start_view_form', [ Button('Cancel', 'end', 'tryton-cancel'), Button('Pay', 'pay', 'tryton-ok', default=True), ]) <NEW_LINE> pay = StateAction('account_payment.act_payment_form') <NEW_LINE> def get_payment(self, line): <NEW_LINE> <INDENT> pool = Pool() <NEW_LINE> Payment = pool.get('account.payment') <NEW_LINE> if (line.debit > 0) or (line.credit < 0): <NEW_LINE> <INDENT> kind = 'receivable' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> kind = 'payable' <NEW_LINE> <DEDENT> return Payment( journal=self.start.journal, party=line.party, kind=kind, amount=line.payment_amount, line=line, ) <NEW_LINE> <DEDENT> def do_pay(self, action): <NEW_LINE> <INDENT> pool = Pool() <NEW_LINE> Line = pool.get('account.move.line') <NEW_LINE> Payment = pool.get('account.payment') <NEW_LINE> lines = Line.browse(Transaction().context['active_ids']) <NEW_LINE> payments = [] <NEW_LINE> for line in lines: <NEW_LINE> <INDENT> payments.append(self.get_payment(line)) <NEW_LINE> <DEDENT> payments = Payment.create([p._save_values for p in payments]) <NEW_LINE> return action, { 'res_id': [p.id for p in payments], } | Pay Line | 62599082e1aae11d1e7cf588 |
class ListUsers(ListAPIView): <NEW_LINE> <INDENT> serializer_class = UserSerializer <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> queryset = User.objects.all() <NEW_LINE> search = self.request.query_params.get('search') <NEW_LINE> if search: <NEW_LINE> <INDENT> return queryset.filter(first_name__icontains=search) | queryset.filter( last_name__icontains=search) | queryset.filter(email__icontains=search) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return queryset | get: List all users (authentication required) | 625990823d592f4c4edbc8d5 |
class PolyakTarget(TargetNetwork): <NEW_LINE> <INDENT> def __init__(self, rate): <NEW_LINE> <INDENT> self._source = None <NEW_LINE> self._target = None <NEW_LINE> self._rate = rate <NEW_LINE> <DEDENT> def __call__(self, *inputs): <NEW_LINE> <INDENT> with torch.no_grad(): <NEW_LINE> <INDENT> return self._target(*inputs) <NEW_LINE> <DEDENT> <DEDENT> def init(self, model): <NEW_LINE> <INDENT> self._source = model <NEW_LINE> self._target = copy.deepcopy(model) <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> for target_param, source_param in zip(self._target.parameters(), self._source.parameters()): <NEW_LINE> <INDENT> target_param.data.copy_(target_param.data * (1.0 - self._rate) + source_param.data * self._rate) | TargetNetwork that updates using polyak averaging | 62599082167d2b6e312b830b |
class videoThread(threading.Thread): <NEW_LINE> <INDENT> def __init__(self, parent,autoStart=True): <NEW_LINE> <INDENT> threading.Thread.__init__(self) <NEW_LINE> self.parent = parent <NEW_LINE> self.setDaemon(1) <NEW_LINE> self.start_orig = self.start <NEW_LINE> self.start = self.start_local <NEW_LINE> self.frame = None <NEW_LINE> self.lock = threading.Lock() <NEW_LINE> self.lock.acquire() <NEW_LINE> if autoStart: <NEW_LINE> <INDENT> self.start() <NEW_LINE> <DEDENT> <DEDENT> def run(self): <NEW_LINE> <INDENT> app = wx.App() <NEW_LINE> frame = SLMframe(monitor = self.parent.monitor, isImageLock = self.parent.isImageLock) <NEW_LINE> frame.Show(True) <NEW_LINE> self.frame = frame <NEW_LINE> self.lock.release() <NEW_LINE> app.MainLoop() <NEW_LINE> <DEDENT> def start_local(self): <NEW_LINE> <INDENT> self.start_orig() <NEW_LINE> self.lock.acquire() | Run the MainLoop as a thread. Access the frame with self.frame. | 6259908223849d37ff852ba5 |
class ReadOnlyStorageMixin: <NEW_LINE> <INDENT> def save(self, name, content_type, filename, fileobj): <NEW_LINE> <INDENT> raise StorageReadOnlyError('Cannot write to read-only storage') <NEW_LINE> <DEDENT> def delete(self, file_id): <NEW_LINE> <INDENT> raise StorageReadOnlyError('Cannot delete from read-only storage') | Mixin that makes write operations fail with an error. | 625990827c178a314d78e960 |
class FrozenDict(Mapping): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self._d = dict(*args, **kwargs) <NEW_LINE> self._hash = None <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self._d) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self._d) <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> return self._d[key] <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> if self._hash is None: <NEW_LINE> <INDENT> self._hash = 0 <NEW_LINE> for key, value in self.items(): <NEW_LINE> <INDENT> self._hash ^= hash(key) <NEW_LINE> self._hash ^= hash(value) <NEW_LINE> <DEDENT> <DEDENT> return self._hash <NEW_LINE> <DEDENT> def project(self, vars): <NEW_LINE> <INDENT> return FrozenDict((x for x in self.items() if x[0] in vars)) <NEW_LINE> <DEDENT> def disjointDomain(self, other): <NEW_LINE> <INDENT> return not bool(set(self).intersection(other)) <NEW_LINE> <DEDENT> def compatible(self, other): <NEW_LINE> <INDENT> for k in self: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if self[k] != other[k]: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> def merge(self, other): <NEW_LINE> <INDENT> res = FrozenDict(itertools.chain(self.items(), other.items())) <NEW_LINE> return res <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return str(self._d) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return repr(self._d) | An immutable hashable dict
Taken from http://stackoverflow.com/a/2704866/81121 | 62599082656771135c48ada6 |
class Controller(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.console_api = console_api.API() <NEW_LINE> <DEDENT> @wsgi.serializers(xml=ConsolesTemplate) <NEW_LINE> def index(self, req, server_id): <NEW_LINE> <INDENT> consoles = self.console_api.get_consoles( req.environ['nova.context'], server_id) <NEW_LINE> return dict(consoles=[_translate_keys(console) for console in consoles]) <NEW_LINE> <DEDENT> def create(self, req, server_id): <NEW_LINE> <INDENT> self.console_api.create_console( req.environ['nova.context'], server_id) <NEW_LINE> <DEDENT> @wsgi.serializers(xml=ConsoleTemplate) <NEW_LINE> def show(self, req, server_id, id): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> console = self.console_api.get_console( req.environ['nova.context'], server_id, int(id)) <NEW_LINE> <DEDENT> except exception.NotFound: <NEW_LINE> <INDENT> raise exc.HTTPNotFound() <NEW_LINE> <DEDENT> return _translate_detail_keys(console) <NEW_LINE> <DEDENT> def update(self, req, server_id, id): <NEW_LINE> <INDENT> raise exc.HTTPNotImplemented() <NEW_LINE> <DEDENT> def delete(self, req, server_id, id): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.console_api.delete_console(req.environ['nova.context'], server_id, int(id)) <NEW_LINE> <DEDENT> except exception.NotFound: <NEW_LINE> <INDENT> raise exc.HTTPNotFound() <NEW_LINE> <DEDENT> return webob.Response(status_int=202) | The Consoles controller for the OpenStack API. | 625990825fc7496912d48fe1 |
class FunctionElement(Executable, ColumnElement, FromClause): <NEW_LINE> <INDENT> packagenames = () <NEW_LINE> def __init__(self, *clauses, **kwargs): <NEW_LINE> <INDENT> args = [_literal_as_binds(c, self.name) for c in clauses] <NEW_LINE> self.clause_expr = ClauseList( operator=operators.comma_op, group_contents=True, *args). self_group() <NEW_LINE> <DEDENT> def _execute_on_connection(self, connection, multiparams, params): <NEW_LINE> <INDENT> return connection._execute_function(self, multiparams, params) <NEW_LINE> <DEDENT> @property <NEW_LINE> def columns(self): <NEW_LINE> <INDENT> return ColumnCollection(self.label(None)) <NEW_LINE> <DEDENT> @util.memoized_property <NEW_LINE> def clauses(self): <NEW_LINE> <INDENT> return self.clause_expr.element <NEW_LINE> <DEDENT> def over(self, partition_by=None, order_by=None, rows=None, range_=None): <NEW_LINE> <INDENT> return Over( self, partition_by=partition_by, order_by=order_by, rows=rows, range_=range_ ) <NEW_LINE> <DEDENT> def within_group(self, *order_by): <NEW_LINE> <INDENT> return WithinGroup(self, *order_by) <NEW_LINE> <DEDENT> def filter(self, *criterion): <NEW_LINE> <INDENT> if not criterion: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> return FunctionFilter(self, *criterion) <NEW_LINE> <DEDENT> def as_comparison(self, left_index, right_index): <NEW_LINE> <INDENT> return FunctionAsBinary( self, left_index, right_index ) <NEW_LINE> <DEDENT> @property <NEW_LINE> def _from_objects(self): <NEW_LINE> <INDENT> return self.clauses._from_objects <NEW_LINE> <DEDENT> def get_children(self, **kwargs): <NEW_LINE> <INDENT> return self.clause_expr, <NEW_LINE> <DEDENT> def _copy_internals(self, clone=_clone, **kw): <NEW_LINE> <INDENT> self.clause_expr = clone(self.clause_expr, **kw) <NEW_LINE> self._reset_exported() <NEW_LINE> FunctionElement.clauses._reset(self) <NEW_LINE> <DEDENT> def within_group_type(self, within_group): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def alias(self, name=None, flat=False): <NEW_LINE> <INDENT> return Alias(self, name) <NEW_LINE> <DEDENT> def select(self): <NEW_LINE> <INDENT> s = Select([self]) <NEW_LINE> if self._execution_options: <NEW_LINE> <INDENT> s = s.execution_options(**self._execution_options) <NEW_LINE> <DEDENT> return s <NEW_LINE> <DEDENT> def scalar(self): <NEW_LINE> <INDENT> return self.select().execute().scalar() <NEW_LINE> <DEDENT> def execute(self): <NEW_LINE> <INDENT> return self.select().execute() <NEW_LINE> <DEDENT> def _bind_param(self, operator, obj, type_=None): <NEW_LINE> <INDENT> return BindParameter(None, obj, _compared_to_operator=operator, _compared_to_type=self.type, unique=True, type_=type_) <NEW_LINE> <DEDENT> def self_group(self, against=None): <NEW_LINE> <INDENT> if against is operators.getitem and isinstance(self.type, sqltypes.ARRAY): <NEW_LINE> <INDENT> return Grouping(self) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return super(FunctionElement, self).self_group(against=against) | Base for SQL function-oriented constructs.
.. seealso::
:class:`.Function` - named SQL function.
:data:`.func` - namespace which produces registered or ad-hoc
:class:`.Function` instances.
:class:`.GenericFunction` - allows creation of registered function
types. | 6259908297e22403b383c9ec |
class AResourceRecordSet(ResourceRecordSet): <NEW_LINE> <INDENT> rrset_type = 'A' <NEW_LINE> def __init__(self, alias_hosted_zone_id=None, alias_dns_name=None, *args, **kwargs): <NEW_LINE> <INDENT> super(AResourceRecordSet, self).__init__(*args, **kwargs) <NEW_LINE> self.alias_hosted_zone_id = alias_hosted_zone_id <NEW_LINE> self.alias_dns_name = alias_dns_name <NEW_LINE> self._initial_vals.update( dict( alias_hosted_zone_id=alias_hosted_zone_id, alias_dns_name=alias_dns_name, ) ) <NEW_LINE> <DEDENT> def is_alias_record_set(self): <NEW_LINE> <INDENT> return self.alias_hosted_zone_id or self.alias_dns_name | Specific A record class. There are two kinds of A records:
* Regular A records.
* Alias A records. These point at an ELB instance instead of an IP.
Create these via
:py:meth:`HostedZone.create_a_record <route53.hosted_zone.HostedZone.create_a_record>`.
Retrieve them via
:py:meth:`HostedZone.record_sets <route53.hosted_zone.HostedZone.record_sets>`. | 62599082aad79263cf4302a8 |
class SpecCanvas(FigCanvas): <NEW_LINE> <INDENT> def __init__(self, parent=None, width=5, height=4, dpi=100): <NEW_LINE> <INDENT> self.fig = Figure(figsize=(width, height), dpi=dpi) <NEW_LINE> self.ax = self.fig.add_subplot(111) <NEW_LINE> FigCanvas.__init__(self, self.fig) <NEW_LINE> self.setParent(parent) <NEW_LINE> self.tracks = [] <NEW_LINE> self.locked_track = 0 <NEW_LINE> self.inv = self.ax.transData.inverted() <NEW_LINE> self.background = None <NEW_LINE> self.x_high = 39 <NEW_LINE> <DEDENT> def mouse(self, event): <NEW_LINE> <INDENT> x_loc, y_loc = self.inv.transform((event.x, event.y)) <NEW_LINE> if 0 < x_loc < 1 and 0 < y_loc < 1: <NEW_LINE> <INDENT> return self.x_high*x_loc, 5000*y_loc <NEW_LINE> <DEDENT> <DEDENT> def startTracks(self, tracks): <NEW_LINE> <INDENT> self.fig.canvas.draw() <NEW_LINE> self.getBackground() <NEW_LINE> self.tracks = [self.ax.plot(track.points, marker="o", markersize=4, markeredgewidth=0.0) for track in tracks] <NEW_LINE> self.ax.set_xlim(0, self.x_high) <NEW_LINE> self.ax.set_ylim(0, 5000) <NEW_LINE> <DEDENT> def updateTrack(self, trackNo, updated_track): <NEW_LINE> <INDENT> self.tracks[trackNo][0].set_ydata(updated_track) <NEW_LINE> <DEDENT> def getBackground(self): <NEW_LINE> <INDENT> self.background = self.fig.canvas.copy_from_bbox(self.ax.bbox) <NEW_LINE> <DEDENT> def redrawTracks(self): <NEW_LINE> <INDENT> self.fig.canvas.restore_region(self.background) <NEW_LINE> for i in range(len(self.tracks)): <NEW_LINE> <INDENT> self.ax.draw_artist(self.tracks[i][0]) <NEW_LINE> <DEDENT> self.fig.canvas.blit(self.ax.bbox) <NEW_LINE> <DEDENT> def rescaleTracks(self): <NEW_LINE> <INDENT> self.ax.set_xlim(0, self.x_high) <NEW_LINE> self.ax.set_ylim(0, 5000) <NEW_LINE> self.fig.canvas.draw() <NEW_LINE> self.getBackground() <NEW_LINE> for i in range(len(self.tracks)): <NEW_LINE> <INDENT> self.tracks[i][0].set_xdata(np.arange(0, self.x_high, self.x_high/40)) <NEW_LINE> <DEDENT> self.redrawTracks() | Ultimately, this is a QWidget (as well as a FigCanvasAgg, etc.). | 625990822c8b7c6e89bd52d3 |
class Book(models.Model): <NEW_LINE> <INDENT> title = models.CharField(max_length=200) <NEW_LINE> author = models.ForeignKey('Author', on_delete=models.SET_NULL, null=True) <NEW_LINE> description = models.TextField(max_length=1500, help_text='Краткое описание книги') <NEW_LINE> isbn = models.CharField('ISBN', max_length=13, default=isbn, unique=True, help_text='13 Character <a href="https://www.isbn-international.org/content/what-isbn">' 'ISBN number</a>') <NEW_LINE> category = models.ManyToManyField('Category', help_text='Выбирете категорию книги') <NEW_LINE> language = models.ForeignKey('Language', on_delete=models.SET_NULL, null=True) <NEW_LINE> publisher = models.ForeignKey('Publisher', on_delete=models.SET_NULL, null=True) <NEW_LINE> pub_year = models.PositiveSmallIntegerField(null=True, blank=True) <NEW_LINE> num_page = models.PositiveSmallIntegerField(null=True, blank=True) <NEW_LINE> binding = models.CharField(max_length=10, choices=BINDING, default='мягкий') <NEW_LINE> class Meta: <NEW_LINE> <INDENT> ordering = ['title'] <NEW_LINE> verbose_name = 'Книга' <NEW_LINE> verbose_name_plural = 'Книги' <NEW_LINE> permissions = (('has_the_right_to_edit', 'Can edit book'),) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.title <NEW_LINE> <DEDENT> def get_absolute_url(self): <NEW_LINE> <INDENT> return reverse('book-detail', args=[str(self.id)]) <NEW_LINE> <DEDENT> def display_category(self): <NEW_LINE> <INDENT> return ', '.join([category.name for category in self.category.all()[:3]]) <NEW_LINE> <DEDENT> display_category.short_description = 'Category' | Model representing a book. | 62599082a8370b77170f1ebf |
class FilesDict(dict): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> dict.__init__(self) <NEW_LINE> <DEDENT> def __setitem__(self, key, val): <NEW_LINE> <INDENT> if key not in self: <NEW_LINE> <INDENT> dict.__setitem__(self, key, val) <NEW_LINE> <DEDENT> elif self[key] != val: <NEW_LINE> <INDENT> raise ValueError(f'Has different values for same key {key!r}') <NEW_LINE> <DEDENT> <DEDENT> def add_file(self, file_path): <NEW_LINE> <INDENT> if file_path is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> key = os.path.basename(file_path) <NEW_LINE> value = read_file(file_path, 'b') <NEW_LINE> try: <NEW_LINE> <INDENT> self[key] = value <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> key = f'{md5(value)}_{key}' <NEW_LINE> self[key] = value <NEW_LINE> <DEDENT> return key <NEW_LINE> <DEDENT> def add_files_from_dict(self, keys, files_dict): <NEW_LINE> <INDENT> inserted_files_dict = {} <NEW_LINE> for key in keys: <NEW_LINE> <INDENT> inserted = self.add_file(files_dict.get(key, None)) <NEW_LINE> if inserted is not None: <NEW_LINE> <INDENT> inserted_files_dict[key] = inserted <NEW_LINE> <DEDENT> <DEDENT> return inserted_files_dict <NEW_LINE> <DEDENT> add_firmware = add_file | Dictionary to store experiment files.
We don't want to add two different values for the same key,
so __setitem__ is overridden to check that | 62599082ad47b63b2c5a933f
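
For illustration, a minimal sketch of the duplicate-key check described above, assuming the FilesDict class is importable from its module; the keys and byte values are made up:

files = FilesDict()
files['report.bin'] = b'\x01\x02'   # first value stored for this key
files['report.bin'] = b'\x01\x02'   # identical value for the same key is accepted
try:
    files['report.bin'] = b'\xff'   # a different value for an existing key
except ValueError as err:
    print(err)                      # Has different values for same key 'report.bin'
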
class TeachingKitView(RetrieveAPIView): <NEW_LINE> <INDENT> queryset = TeachingKit.objects.all() <NEW_LINE> serializer_class = TeachingKitSerializer <NEW_LINE> pagination_class = None | A view that allows listing of a single activity
by providing its `id` as a parameter | 62599082f9cc0f698b1c6043
class AddToGroup(gui_base.GuiCommandNeedsSelection): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(AddToGroup, self).__init__(name=translate("draft","Add to group")) <NEW_LINE> self.ungroup = QT_TRANSLATE_NOOP("Draft_AddToGroup","Ungroup") <NEW_LINE> <DEDENT> def GetResources(self): <NEW_LINE> <INDENT> _tooltip = () <NEW_LINE> d = {'Pixmap': 'Draft_AddToGroup', 'MenuText': QT_TRANSLATE_NOOP("Draft_AddToGroup","Move to group")+"...", 'ToolTip': QT_TRANSLATE_NOOP("Draft_AddToGroup","Moves the selected objects to an existing group, or removes them from any group.\nCreate a group first to use this tool.")} <NEW_LINE> return d <NEW_LINE> <DEDENT> def Activated(self): <NEW_LINE> <INDENT> super(AddToGroup, self).Activated() <NEW_LINE> self.groups = [self.ungroup] <NEW_LINE> self.groups.extend(groups.get_group_names()) <NEW_LINE> self.labels = [self.ungroup] <NEW_LINE> for group in self.groups: <NEW_LINE> <INDENT> obj = self.doc.getObject(group) <NEW_LINE> if obj: <NEW_LINE> <INDENT> self.labels.append(obj.Label) <NEW_LINE> <DEDENT> <DEDENT> self.ui = Gui.draftToolBar <NEW_LINE> self.ui.sourceCmd = self <NEW_LINE> self.ui.popupMenu(self.labels) <NEW_LINE> <DEDENT> def proceed(self, labelname): <NEW_LINE> <INDENT> self.ui.sourceCmd = None <NEW_LINE> if labelname == self.ungroup: <NEW_LINE> <INDENT> for obj in Gui.Selection.getSelection(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> groups.ungroup(obj) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if labelname in self.labels: <NEW_LINE> <INDENT> i = self.labels.index(labelname) <NEW_LINE> g = self.doc.getObject(self.groups[i]) <NEW_LINE> for obj in Gui.Selection.getSelection(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> g.addObject(obj) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> pass | GuiCommand for the Draft_AddToGroup tool.
It adds selected objects to a group, or removes them from any group.
It inherits `GuiCommandNeedsSelection` to only be available
when there is a document and a selection.
See this class for more information. | 625990824a966d76dd5f09d4 |
class bundleFiles(LOFARrecipe): <NEW_LINE> <INDENT> inputs = { 'obsid' : ingredient.StringField( '--obsid', dest="obsid", help="Observation identifier" ), 'pulsar' : ingredient.StringField( '--pulsar', dest="pulsar", help="Pulsar name" ), 'filefactor' : ingredient.IntField( '--filefactor', dest="filefactor", help="factor by which obsid subbands will be RSP split." ), 'arch' : ingredient.StringField( '--arch', dest="arch", help="Destination output Pulsar Archive, string like 'arch134'", ) } <NEW_LINE> outputs = { 'data': ingredient.ListField() } <NEW_LINE> def go(self): <NEW_LINE> <INDENT> super(bundleFiles, self).go() <NEW_LINE> self.logger.info("... Bundling data for obsid " + self.inputs['obsid']) <NEW_LINE> obsid = self.inputs['obsid'] <NEW_LINE> pulsar = self.inputs['pulsar'] <NEW_LINE> arch = self.inputs['arch'] <NEW_LINE> filefactor = self.inputs['filefactor'] <NEW_LINE> logDir = self.config.get('layout','log_directory') <NEW_LINE> userEnv = self.__buildUserEnv() <NEW_LINE> tc, mec = self._get_cluster() <NEW_LINE> targets = mec.get_ids()[0] <NEW_LINE> self.logger.info("Building tar archive ..." ) <NEW_LINE> self.logger.info("Tar archive will appear at OBSID directory level.") <NEW_LINE> self.logger.info("remote import on bundlePlots ...") <NEW_LINE> mec.execute("import bundlePlots",targets=[targets]) <NEW_LINE> self.logger.info("remote instantiation on bundlePlots.BundlePlots ...") <NEW_LINE> mec.execute("tarball = bundlePlots.BundlePlots(\"%s\",\"%s\",\"%s\",\"%s\",\"%s\")" % (obsid,pulsar,arch,logDir,userEnv),targets=[targets]) <NEW_LINE> self.logger.info("calling ballIt() on tarball instance ...") <NEW_LINE> mec.execute("tarch = tarball.ballIt()",targets=[targets]) <NEW_LINE> tarFile = mec.pull("tarch",targets=[targets]) <NEW_LINE> self.logger.info(obsid+" Tar archive built:") <NEW_LINE> self.logger.info(tarFile) <NEW_LINE> return <NEW_LINE> <DEDENT> def __buildUserEnv(self): <NEW_LINE> <INDENT> userEnv = "" <NEW_LINE> userEnv = "LOFARSOFT = "+os.environ["LOFARSOFT"] <NEW_LINE> userEnv += ":TEMPO = " +os.environ["TEMPO"] <NEW_LINE> userEnv += ":PRESTO =" +os.environ["PRESTO"] <NEW_LINE> return userEnv | Pipeline-based mechanism for creating a tar archive of
output data products from this pipeline.
The parser processes all arguments passed by the framework
through CLI arguments and any arguments specified in
task configuration files.
Command-line arguments override defaults set in task.cfg.
This recipe will create a gzipped tarball for the obsid
pulsar output,
i.e., a file named like <pulsarName>_<obsid>_plots.tar.gz,
e.g.,
B1112+50_L2010_21325_plots.tar.gz | 625990825fdd1c0f98e5fa6e
class DayTimeInForce(TimeInForce, ITimeInForceHandler): <NEW_LINE> <INDENT> def IsFillValid(self, security, order, fill): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def IsOrderExpired(self, security, order): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __init__(self, *args): <NEW_LINE> <INDENT> pass | Day Time In Force - order expires at market close
DayTimeInForce() | 62599082bf627c535bcb2fc2 |
class Subnet(BaseAPI): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> id = Field() <NEW_LINE> cidr = Field() <NEW_LINE> gateway = Field() <NEW_LINE> start_ip = Field() <NEW_LINE> end_ip = Field() <NEW_LINE> enable_dhcp = Field() | Args:
id (str): Identifier
cidr (str): CIDR
gateway (str): Gateway address
start_ip (str): Starting address for DHCP
end_ip (str): Ending address for DHCP
enable_dhcp (bool): Enable or disable DHCP | 62599082adb09d7d5dc0c049
class Fasta: <NEW_LINE> <INDENT> def __init__(self, header, seq): <NEW_LINE> <INDENT> self.header = header <NEW_LINE> self.seq = seq <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.seq | Simple class to store fasta-formatted sequences | 6259908250812a4eaa62193d |
class Header(CPPFile): <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> super(Header, self).__init__(name) <NEW_LINE> self.included = set() <NEW_LINE> <DEDENT> def add(self, header): <NEW_LINE> <INDENT> super(Header, self).add(header) <NEW_LINE> header.included.add(self) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "Header('{}')".format(self.name) <NEW_LINE> <DEDENT> def asdict(self): <NEW_LINE> <INDENT> d = {"name": self.name} <NEW_LINE> if self.includes: <NEW_LINE> <INDENT> d["includes"] = list(sorted([x.name for x in self.includes])) <NEW_LINE> <DEDENT> return d | Represents a header file, that can be included itself | 625990824c3428357761bdab |
class Client: <NEW_LINE> <INDENT> def __init__(self, session): <NEW_LINE> <INDENT> self.__session = session <NEW_LINE> self.__target_nodes = {} <NEW_LINE> self.listen_to_client() <NEW_LINE> <DEDENT> def listen_to_client(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> request = self.__session.receive().get_data() <NEW_LINE> action = request['action'] <NEW_LINE> _id = request['id'] <NEW_LINE> if action == 'connection': <NEW_LINE> <INDENT> self.__target_nodes[_id] = Node(request['ip'], request['port'], _id, self.send, self.close_connection) <NEW_LINE> <DEDENT> elif action == 'close': <NEW_LINE> <INDENT> self.__target_nodes[_id].close() <NEW_LINE> del self.__target_nodes[_id] <NEW_LINE> <DEDENT> elif action == 'send': <NEW_LINE> <INDENT> self.__target_nodes[_id].send(request['data']) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def send(self, _id, data): <NEW_LINE> <INDENT> self.__session.send_object({'action':'send', 'id':_id, 'data': data}) <NEW_LINE> <DEDENT> def close_connection(self, _id): <NEW_LINE> <INDENT> self.__session.send_object({'action':'close', 'id': _id}) | The client that requests the connection | 625990823617ad0b5ee07c40 |
class Locality: <NEW_LINE> <INDENT> def __init__(self, zip=0, short_name=None, long_name=None, canton=None, _zip_type_number=None, _onrp=None): <NEW_LINE> <INDENT> self.zip = zip <NEW_LINE> self.short_name = short_name <NEW_LINE> self.long_name = long_name <NEW_LINE> self.canton = canton <NEW_LINE> if _zip_type_number: <NEW_LINE> <INDENT> self._zip_type_number = _zip_type_number <NEW_LINE> <DEDENT> self._onrp = _onrp <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, Locality) and self.zip == other.zip and self.short_name == other.short_name and self.long_name == other.long_name and self.canton == other.canton and self._zip_type_number == other._zip_type_number and self._onrp == other._onrp <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "Locality(zip=%r, short_name=%r, long_name=%r, canton=%r, " "_zip_type_number=%r, _onrp=%r)" % (self.zip, self.short_name, self.long_name, self.canton, self._zip_type_number, self._onrp) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.__repr__() <NEW_LINE> <DEDENT> zip = 0 <NEW_LINE> short_name = None <NEW_LINE> long_name = None <NEW_LINE> canton = None <NEW_LINE> zip_type = None <NEW_LINE> _onrp = 0 <NEW_LINE> _zip_type_number = property() <NEW_LINE> __zip_type_number = None <NEW_LINE> @_zip_type_number.getter <NEW_LINE> def _zip_type_number(self): <NEW_LINE> <INDENT> return self.__zip_type_number <NEW_LINE> <DEDENT> @_zip_type_number.setter <NEW_LINE> def _zip_type_number(self, value): <NEW_LINE> <INDENT> self.__zip_type_number = value <NEW_LINE> self.zip_type = ZipType._to_type(value) | A locality is the name of a town, village or any "string"
that goes after the ZIP code in the address. | 625990825fcc89381b266ed5 |
class FittingTabWidget(object): <NEW_LINE> <INDENT> def __init__(self, context, parent): <NEW_LINE> <INDENT> is_frequency_domain = isinstance(context, FrequencyDomainAnalysisContext) <NEW_LINE> if is_frequency_domain: <NEW_LINE> <INDENT> self.fitting_tab_view = BasicFittingView(parent) <NEW_LINE> self.fitting_tab_view.hide_fit_raw_checkbox() <NEW_LINE> self.fitting_tab_model = BasicFittingModel(context, context.fitting_context) <NEW_LINE> self.fitting_tab_presenter = BasicFittingPresenter(self.fitting_tab_view, self.fitting_tab_model) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.fitting_tab_view = TFAsymmetryFittingView(parent) <NEW_LINE> self.fitting_tab_view.set_start_and_end_x_labels("Time Start", "Time End") <NEW_LINE> self.fitting_tab_model = TFAsymmetryFittingModel(context, context.fitting_context) <NEW_LINE> self.fitting_tab_presenter = TFAsymmetryFittingPresenter(self.fitting_tab_view, self.fitting_tab_model) <NEW_LINE> <DEDENT> context.update_view_from_model_notifier.add_subscriber( self.fitting_tab_presenter.update_view_from_model_observer) <NEW_LINE> <DEDENT> def show_fit_script_generator(self)->None: <NEW_LINE> <INDENT> self.fitting_tab_view.show_fit_script_generator() | The FittingTabWidget creates the tab used for fitting. Muon Analysis uses the TF Asymmetry fitting widget, and
Frequency Domain Analysis uses the Basic fitting widget. | 6259908226068e7796d4e432 |
class TestUserListTestCase(APITestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.url = reverse("user-list") <NEW_LINE> self.user_data = factory.build(dict, FACTORY_CLASS=UserFactory) <NEW_LINE> <DEDENT> def test_post_request_with_no_data_fails(self): <NEW_LINE> <INDENT> response = self.client.post(self.url, {}) <NEW_LINE> eq_(response.status_code, status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> def test_post_request_with_valid_data_succeeds(self): <NEW_LINE> <INDENT> response = self.client.post(self.url, self.user_data) <NEW_LINE> eq_(response.status_code, status.HTTP_201_CREATED) <NEW_LINE> user = User.objects.get(pk=response.data.get("id")) <NEW_LINE> eq_(user.username, self.user_data.get("username")) <NEW_LINE> ok_(check_password(self.user_data.get("password"), user.password)) | Tests /users list operations. | 62599082ec188e330fdfa39c |
class U_UCB(UCB_discrete): <NEW_LINE> <INDENT> def __init__(self, env, summary_stats, num_rounds, **kwargs): <NEW_LINE> <INDENT> super().__init__(env, summary_stats, num_rounds) <NEW_LINE> self.hyperpara = kwargs.get('hyperpara', None) <NEW_LINE> self.alpha = self.hyperpara[0] <NEW_LINE> <DEDENT> def argmax_ucb(self, t): <NEW_LINE> <INDENT> policy = [] <NEW_LINE> for arm in sorted(self.sample_rewards.keys()): <NEW_LINE> <INDENT> reward = self.sample_rewards[arm] <NEW_LINE> emp_quantile = np.median(reward) <NEW_LINE> t_i = len(reward) <NEW_LINE> b = 6 <NEW_LINE> a = 2 <NEW_LINE> q = 1 <NEW_LINE> cw = self.phi_inverse(self.alpha * np.log(t)/ t_i, b, q, a) <NEW_LINE> policy.append(emp_quantile + cw) <NEW_LINE> <DEDENT> return np.argmax(policy) <NEW_LINE> <DEDENT> def phi_inverse(self,x, b, q, a): <NEW_LINE> <INDENT> return np.max([2 * b * np.sqrt(x/a), 2 * b * np.sqrt(x/a) ** q]) | U-UCB policy from Cassel et al. 2018.
| 625990823d592f4c4edbc8d8 |
class UserSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = User <NEW_LINE> fields = ('id', 'name', 'object_type') | Serializer class for User model | 6259908266673b3332c31ef1 |
class Gaussian(): <NEW_LINE> <INDENT> def __init__(self, mu = 0, sigma = 1): <NEW_LINE> <INDENT> self.mean = mu <NEW_LINE> self.stdev = sigma <NEW_LINE> self.data = [] <NEW_LINE> <DEDENT> def calculate_mean(self): <NEW_LINE> <INDENT> total = 0 <NEW_LINE> if(len(self.data) == 0): <NEW_LINE> <INDENT> self.mean = 0 <NEW_LINE> return self.mean <NEW_LINE> <DEDENT> for each in self.data: <NEW_LINE> <INDENT> total += each <NEW_LINE> <DEDENT> self.mean = (total/float(len(self.data))) <NEW_LINE> return self.mean <NEW_LINE> <DEDENT> def calculate_stdev(self, sample=True): <NEW_LINE> <INDENT> total = 0.0 <NEW_LINE> factor = len(self.data) <NEW_LINE> if (sample == True): <NEW_LINE> <INDENT> factor = (len(self.data)-1.0) <NEW_LINE> <DEDENT> if self.mean == 0: <NEW_LINE> <INDENT> self.calculate_mean() <NEW_LINE> <DEDENT> for each in self.data: <NEW_LINE> <INDENT> total += (float(each) - self.mean) ** 2 <NEW_LINE> <DEDENT> mean_of_squared_differences = (total/factor) <NEW_LINE> self.stdev = math.sqrt(mean_of_squared_differences) <NEW_LINE> return self.stdev <NEW_LINE> <DEDENT> def read_data_file(self, file_name, sample=True): <NEW_LINE> <INDENT> with open(file_name) as file: <NEW_LINE> <INDENT> data_list = [] <NEW_LINE> line = file.readline() <NEW_LINE> while line: <NEW_LINE> <INDENT> data_list.append(int(line)) <NEW_LINE> line = file.readline() <NEW_LINE> <DEDENT> <DEDENT> file.close() <NEW_LINE> self.data = data_list <NEW_LINE> self.calculate_mean() <NEW_LINE> self.calculate_stdev(sample) <NEW_LINE> return <NEW_LINE> <DEDENT> def plot_histogram(self): <NEW_LINE> <INDENT> plt.xlabel('Test1') <NEW_LINE> plt.ylabel('Blue1') <NEW_LINE> plt.title('Green3') <NEW_LINE> plt.hist(self.data) <NEW_LINE> plt.show() <NEW_LINE> <DEDENT> def pdf(self, x): <NEW_LINE> <INDENT> main = 1.0 / (self.stdev * math.sqrt(2*math.pi)) <NEW_LINE> second = math.exp(-0.5*((x - self.mean) / self.stdev) ** 2) <NEW_LINE> return main * second <NEW_LINE> <DEDENT> def plot_histogram_pdf(self, n_spaces = 50): <NEW_LINE> <INDENT> mu = self.mean <NEW_LINE> sigma = self.stdev <NEW_LINE> min_range = min(self.data) <NEW_LINE> max_range = max(self.data) <NEW_LINE> interval = 1.0 * (max_range - min_range) / n_spaces <NEW_LINE> x = [] <NEW_LINE> y = [] <NEW_LINE> for i in range(n_spaces): <NEW_LINE> <INDENT> tmp = min_range + interval*i <NEW_LINE> x.append(tmp) <NEW_LINE> y.append(self.pdf(tmp)) <NEW_LINE> <DEDENT> fig, axes = plt.subplots(2,sharex=True) <NEW_LINE> fig.subplots_adjust(hspace=.5) <NEW_LINE> axes[0].hist(self.data, density=True) <NEW_LINE> axes[0].set_title('Normed Histogram of Data') <NEW_LINE> axes[0].set_ylabel('Density') <NEW_LINE> axes[1].plot(x, y) <NEW_LINE> axes[1].set_title('Normal Distribution for \n Sample Mean and Sample Standard Deviation') <NEW_LINE> axes[0].set_ylabel('Density') <NEW_LINE> plt.show() <NEW_LINE> return x, y | Gaussian distribution class for calculating and
visualizing a Gaussian distribution.
Attributes:
mean (float) representing the mean value of the distribution
stdev (float) representing the standard deviation of the distribution
data_list (list of floats) a list of floats extracted from the data file
| 62599082bf627c535bcb2fc4 |
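
A short usage sketch of the class above; the data values and the approximate results in the comments are illustrative only:

g = Gaussian()
g.data = [1, 2, 3, 4, 5]
print(g.calculate_mean())    # 3.0
print(g.calculate_stdev())   # sample standard deviation: sqrt(10 / 4) ~ 1.58
print(g.pdf(3.0))            # density at the mean: 1 / (stdev * sqrt(2 * pi)) ~ 0.25
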
class ClusterPolicy(object): <NEW_LINE> <INDENT> def __init__(self, cluster_id, policy_id, **kwargs): <NEW_LINE> <INDENT> self.id = kwargs.get('id', None) <NEW_LINE> self.cluster_id = cluster_id <NEW_LINE> self.policy_id = policy_id <NEW_LINE> self.enabled = kwargs.get('enabled') <NEW_LINE> self.data = kwargs.get('data', {}) <NEW_LINE> self.priority = kwargs.get('priority') <NEW_LINE> self.last_op = kwargs.get('last_op', None) <NEW_LINE> self.cluster_name = kwargs.get('cluster_name', '') <NEW_LINE> self.policy_name = kwargs.get('policy_name', '') <NEW_LINE> self.policy_type = kwargs.get('policy_type', '') <NEW_LINE> <DEDENT> def store(self, context): <NEW_LINE> <INDENT> values = { 'enabled': self.enabled, 'data': self.data, 'last_op': self.last_op, 'priority': self.priority } <NEW_LINE> if self.id: <NEW_LINE> <INDENT> cpo.ClusterPolicy.update(context, self.cluster_id, self.policy_id, values) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> binding = cpo.ClusterPolicy.create(context, self.cluster_id, self.policy_id, values) <NEW_LINE> self.cluster_name = binding.cluster.name <NEW_LINE> self.policy_name = binding.policy.name <NEW_LINE> self.policy_type = binding.policy.type <NEW_LINE> self.id = binding.id <NEW_LINE> <DEDENT> return self.id <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _from_object(cls, context, obj): <NEW_LINE> <INDENT> kwargs = { 'id': obj.id, 'enabled': obj.enabled, 'data': obj.data, 'last_op': obj.last_op, 'priority': obj.priority, 'cluster_name': obj.cluster.name, 'policy_name': obj.policy.name, 'policy_type': obj.policy.type, } <NEW_LINE> return cls(obj.cluster_id, obj.policy_id, context=context, **kwargs) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def load(cls, context, cluster_id, policy_id): <NEW_LINE> <INDENT> binding = cpo.ClusterPolicy.get(context, cluster_id, policy_id) <NEW_LINE> if binding is None: <NEW_LINE> <INDENT> raise exception.PolicyNotAttached(policy=policy_id, cluster=cluster_id) <NEW_LINE> <DEDENT> return cls._from_object(context, binding) <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> binding_dict = { 'id': self.id, 'cluster_id': self.cluster_id, 'policy_id': self.policy_id, 'enabled': self.enabled, 'data': self.data, 'last_op': self.last_op, 'cluster_name': self.cluster_name, 'policy_name': self.policy_name, 'policy_type': self.policy_type, } <NEW_LINE> return binding_dict | Object representing a binding between a cluster and a policy.
This object also records the runtime data of a policy, if any. | 62599082adb09d7d5dc0c04b |
class Manager: <NEW_LINE> <INDENT> def __init__(self, conf): <NEW_LINE> <INDENT> logging.info('Started %s', self.__class__) <NEW_LINE> self.conf = conf <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def rules(rule_list): <NEW_LINE> <INDENT> for rule in rule_list: <NEW_LINE> <INDENT> yield MailRule(rule) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> async def rule_funcs(self): <NEW_LINE> <INDENT> return {'mail': Mail()} <NEW_LINE> <DEDENT> def wants_message(self, mail_from, rcpt_tos, content): <NEW_LINE> <INDENT> wanted = self.conf['messages-we-want'] <NEW_LINE> wanted_to = wanted.get('to') <NEW_LINE> wanted_from = wanted.get('from') <NEW_LINE> logging.debug('We want to: %s or from: %s', wanted_to, wanted_from) <NEW_LINE> logging.debug('We got to: %s and from: %s', rcpt_tos, mail_from) <NEW_LINE> if wanted_to: <NEW_LINE> <INDENT> return wanted_to in rcpt_tos <NEW_LINE> <DEDENT> if wanted_from: <NEW_LINE> <INDENT> return wanted_from == mail_from <NEW_LINE> <DEDENT> <DEDENT> async def process_message(self, mail_from, rcpt_tos, binary_content): <NEW_LINE> <INDENT> logging.debug('process_message("%s", %s, %s)', mail_from, rcpt_tos, binary_content) <NEW_LINE> recipients = [] <NEW_LINE> msg = self.get_message(binary_content) <NEW_LINE> logging.info('Extracted message %s', msg) <NEW_LINE> for rule in self.rules(self.conf['rules']): <NEW_LINE> <INDENT> logging.debug('Check %s', rule) <NEW_LINE> actions = Actions(rule.check(msg, await self.rule_funcs)) <NEW_LINE> recipients.extend([a.destination for a in actions.mailto]) <NEW_LINE> if actions.slack: <NEW_LINE> <INDENT> await self.notify_slack(msg, actions.slack) <NEW_LINE> <DEDENT> <DEDENT> return mail_from, recipients, binary_content <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> async def notify_slack(msg, slack_actions): <NEW_LINE> <INDENT> sm = SlackMessage(msg) <NEW_LINE> await sm.post(slack_actions) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_message(content): <NEW_LINE> <INDENT> return Message(content) <NEW_LINE> <DEDENT> async def test(self): <NEW_LINE> <INDENT> pass | mail.Manager objects are handed mail messages.
Based on the mail2alert configuration and mail content,
they determine what to do with the mail message. | 6259908250812a4eaa62193e |
class DisableWhiteBoxKeyRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.KeyId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.KeyId = params.get("KeyId") <NEW_LINE> memeber_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in memeber_set: <NEW_LINE> <INDENT> memeber_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(memeber_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fileds are useless." % ",".join(memeber_set)) | DisableWhiteBoxKey request parameter structure
| 6259908263b5f9789fe86c5b |
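
A hypothetical round trip through _deserialize for the request class above; the KeyId value is made up, and the extra key is included only to demonstrate the warning branch:

req = DisableWhiteBoxKeyRequest()
req._deserialize({"KeyId": "244dab8c-6dbd-11e9-a9bc-525400000000", "Extra": 1})
print(req.KeyId)   # the known field is copied; "Extra" triggers the "fileds are useless" warning
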
class AdminHandler(BaseHandler): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> self.render("admin.html") | Displays the admin page with all the admin options | 625990823346ee7daa3383db |
@public <NEW_LINE> class Task(object): <NEW_LINE> <INDENT> def __init__(self, target, args=(), kwargs={}, success=lambda x: x, failure=lambda x: x, infinite=False): <NEW_LINE> <INDENT> self._id = random.getrandbits(128) <NEW_LINE> self._kill_ev = threading.Event() <NEW_LINE> self._infinite = infinite <NEW_LINE> self.target = target <NEW_LINE> self.success = success <NEW_LINE> self.failure = failure <NEW_LINE> self.args = args <NEW_LINE> self.kwargs = kwargs <NEW_LINE> <DEDENT> def stop(self): <NEW_LINE> <INDENT> self._infinite = False <NEW_LINE> self._kill_ev.set() <NEW_LINE> <DEDENT> @property <NEW_LINE> def id(self): <NEW_LINE> <INDENT> return self._id <NEW_LINE> <DEDENT> @property <NEW_LINE> def target(self): <NEW_LINE> <INDENT> return self._target <NEW_LINE> <DEDENT> @target.setter <NEW_LINE> def target(self, value): <NEW_LINE> <INDENT> if not callable(value): <NEW_LINE> <INDENT> raise ATPTaskError("field target need to be a callable") <NEW_LINE> <DEDENT> self._target = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def success(self): <NEW_LINE> <INDENT> return self._success <NEW_LINE> <DEDENT> @success.setter <NEW_LINE> def success(self, value): <NEW_LINE> <INDENT> if not callable(value): <NEW_LINE> <INDENT> raise ATPTaskError("field success need to be a callable") <NEW_LINE> <DEDENT> self._success = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def failure(self): <NEW_LINE> <INDENT> return self._failure <NEW_LINE> <DEDENT> @failure.setter <NEW_LINE> def failure(self, value): <NEW_LINE> <INDENT> if not callable(value): <NEW_LINE> <INDENT> raise ATPTaskError("field failure need to be a callable") <NEW_LINE> <DEDENT> self._failure = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def infinite(self): <NEW_LINE> <INDENT> return self._infinite <NEW_LINE> <DEDENT> @property <NEW_LINE> def args(self): <NEW_LINE> <INDENT> return self._args <NEW_LINE> <DEDENT> @args.setter <NEW_LINE> def args(self, value): <NEW_LINE> <INDENT> if not isinstance(value, (tuple, list)): <NEW_LINE> <INDENT> raise ATPTaskError("field args need to be either a list or a tuple") <NEW_LINE> <DEDENT> value = tuple(value) <NEW_LINE> if self.infinite: <NEW_LINE> <INDENT> self._args = (self._kill_ev,) + value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._args = value <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def kwargs(self): <NEW_LINE> <INDENT> return self._kwargs <NEW_LINE> <DEDENT> @kwargs.setter <NEW_LINE> def kwargs(self, value): <NEW_LINE> <INDENT> if not isinstance(value, dict): <NEW_LINE> <INDENT> raise ATPTaskError("field kwargs need to be a dict") <NEW_LINE> <DEDENT> self._kwargs = value | A class to describe a task to be used by the thread pool.
An infinite or a blocking task will have to take a kill :class:`threading.Event`
as the first argument to the function to be able to end operations gracefully
in case a stop event was triggered.
This class takes two callbacks as keyword arguments to handle the cases where
a task succeeds or fails, called ``success`` and ``failure`` respectively.
Calling the :func:`Task.stop` method will signal the worker in the thread pool
to stop execution and exit gracefully. | 62599082283ffb24f3cf5393 |
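
A hypothetical usage sketch for the Task class above: `poll_queue` is an invented target, and actually running the task is left to the thread pool mentioned in the docstring.

import time

def poll_queue(kill_ev, interval):
    # An infinite task receives the kill threading.Event as its first argument
    # and checks it regularly so it can exit gracefully.
    while not kill_ev.is_set():
        time.sleep(interval)

task = Task(poll_queue, args=(0.5,), infinite=True)  # the kill event is prepended to args
# ... hand `task` to the thread pool for execution ...
task.stop()  # sets the kill event so poll_queue returns on its next check
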
class ParallelEvaluation(object): <NEW_LINE> <INDENT> __slots__ = ['evaluator', '__initialized__', 'job'] <NEW_LINE> def __init__(self, evaluator): <NEW_LINE> <INDENT> self.evaluator = evaluator <NEW_LINE> self.__initialized__ = 0 <NEW_LINE> <DEDENT> def initialize(self, X, y=None, dir=None): <NEW_LINE> <INDENT> self.job = Job('evaluate') <NEW_LINE> try: <NEW_LINE> <INDENT> self.job.tmp = tempfile.TemporaryDirectory(prefix='mlens_', dir=dir) <NEW_LINE> self.job.dir = self.job.tmp.name <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> self.job.dir = tempfile.mkdtemp(prefix='mlens_', dir=dir) <NEW_LINE> <DEDENT> for name, arr in zip(('X', 'y'), (X, y)): <NEW_LINE> <INDENT> if isinstance(arr, str): <NEW_LINE> <INDENT> if not arr.split('.')[-1] in ['mmap', 'npy', 'npz']: <NEW_LINE> <INDENT> arr = _load(arr) <NEW_LINE> <DEDENT> <DEDENT> if isinstance(arr, str): <NEW_LINE> <INDENT> f = arr <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> f = os.path.join(self.job.dir, '%s.mmap' % name) <NEW_LINE> if os.path.exists(f): <NEW_LINE> <INDENT> os.unlink(f) <NEW_LINE> <DEDENT> dump(arr, f) <NEW_LINE> <DEDENT> if name is 'y': <NEW_LINE> <INDENT> self.job.y = _load_mmap(f) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.job.P = _load_mmap(f) <NEW_LINE> <DEDENT> <DEDENT> self.__initialized__ = 1 <NEW_LINE> gc.collect() <NEW_LINE> <DEDENT> def process(self, attr): <NEW_LINE> <INDENT> check_initialized(self) <NEW_LINE> with Parallel(n_jobs=self.evaluator.n_jobs, temp_folder=self.job.dir, max_nbytes=None, mmap_mode='r+', verbose=self.evaluator.verbose, backend=self.evaluator.backend) as parallel: <NEW_LINE> <INDENT> f = ENGINES['evaluation'](self.evaluator) <NEW_LINE> getattr(f, attr)(parallel, self.job.P, self.job.y, self.job.dir) <NEW_LINE> <DEDENT> <DEDENT> def terminate(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.job.tmp.cleanup() <NEW_LINE> <DEDENT> except (AttributeError, OSError): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> shutil.rmtree(self.job.dir) <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> subprocess.Popen('rmdir /S /Q %s' % self.job.dir, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> warnings.warn("Failed to delete cache at %s." "If created with default settings, will be " "removed on reboot. For immediate " "removal, manual removal is required." % self.job.dir, ParallelProcessingWarning) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> del self.job <NEW_LINE> gc.collect() <NEW_LINE> if not len(gc.garbage) == 0: <NEW_LINE> <INDENT> warnings.warn("Clearing process memory failed, " "uncollectable : %r." % gc.garbage, ParallelProcessingWarning) <NEW_LINE> <DEDENT> self.__initialized__ = 0 | Parallel cross-validation engine.
Parameters
----------
evaluator : :class:`Evaluator`
The ``Evaluator`` that instantiated the processor. | 6259908297e22403b383c9f2 |
class CodeBlockParser: <NEW_LINE> <INDENT> language: str <NEW_LINE> def __init__(self, language: str = None, evaluator: Evaluator = None): <NEW_LINE> <INDENT> if language is not None: <NEW_LINE> <INDENT> self.language = language <NEW_LINE> <DEDENT> assert self.language, 'language must be specified!' <NEW_LINE> if evaluator is not None: <NEW_LINE> <INDENT> self.evaluate = evaluator <NEW_LINE> <DEDENT> <DEDENT> def pad(self, source: str, line: int) -> str: <NEW_LINE> <INDENT> return (line+1)*'\n' + source <NEW_LINE> <DEDENT> def evaluate(self, example: Example): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def __call__(self, document: Document) -> Iterable[Region]: <NEW_LINE> <INDENT> for start_match in re.finditer(CODEBLOCK_START, document.text): <NEW_LINE> <INDENT> if start_match.group('language') != self.language: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> source_start = start_match.end() <NEW_LINE> indent = str(len(start_match.group('indent'))) <NEW_LINE> end_pattern = re.compile(r'(\n\Z|\n[ \t]{0,'+indent+'}(?=\\S))') <NEW_LINE> end_match = end_pattern.search(document.text, source_start) <NEW_LINE> source_end = end_match.start() <NEW_LINE> source = textwrap.dedent(document.text[source_start:source_end]) <NEW_LINE> yield Region( start_match.start(), source_end, source, self.evaluate ) | A class to instantiate and include when your documentation makes use of
:ref:`codeblock-parser` examples.
:param language:
The language that this parser should look for.
:param evaluator:
The evaluator to use for evaluating code blocks in the specified language.
You can also override the :meth:`evaluate` below. | 62599082aad79263cf4302ae |
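
As a sketch, a subclass for Python code blocks; the attribute names used on `example` (its parsed source and shared namespace) are assumptions about the surrounding test framework, not taken from the original source.

class PythonCodeBlockParser(CodeBlockParser):
    language = 'python'

    def evaluate(self, example):
        # Assumed attributes: example.parsed holds the extracted source,
        # example.namespace is the dict shared between examples.
        exec(example.parsed, example.namespace)
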
class With(Container): <NEW_LINE> <INDENT> _command = "\\with" <NEW_LINE> _inline = True | With container.
Usage::
With(content)
Parameters
==========
content: list, setting overrides
Content of the container
Returns
=======
None
Raises
======
InvalidArgument:
if any arguments are supplied
InvalidContent: UNIMPLEMENTED
if the content contains anything but lilyflower objects
Notes
=====
With is an inline object - it won't be printed on a newline. Its
contents will, though.
See Also
========
:class:`lilyflower.container.Container`
:class:`lilyflower.errors.InvalidArgument`
:class:`lilyflower.errors.InvalidContent`
References
==========
`Lilypond \\with documentation
<http://lilypond.org/doc/v2.18/Documentation/notation/modifying-context-plug_002dins#index-_005cwith-1>`_
Examples
========
.. testsetup::
from lilyflower.containers import With
from lilyflower.errors import InvalidArgument
.. doctest::
>>> print format(With([]))
\with {
}
>>> try:
... With([], ["invalid argument"])
... except InvalidArgument as e:
... print e
Expects between 0 and 0 arguments. | 6259908226068e7796d4e434 |
class SentCodeTypeFlashCall(TLObject): <NEW_LINE> <INDENT> __slots__: List[str] = ["pattern"] <NEW_LINE> ID = 0xab03c6d9 <NEW_LINE> QUALNAME = "types.auth.SentCodeTypeFlashCall" <NEW_LINE> def __init__(self, *, pattern: str) -> None: <NEW_LINE> <INDENT> self.pattern = pattern <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def read(data: BytesIO, *args: Any) -> "SentCodeTypeFlashCall": <NEW_LINE> <INDENT> pattern = String.read(data) <NEW_LINE> return SentCodeTypeFlashCall(pattern=pattern) <NEW_LINE> <DEDENT> def write(self) -> bytes: <NEW_LINE> <INDENT> data = BytesIO() <NEW_LINE> data.write(Int(self.ID, False)) <NEW_LINE> data.write(String(self.pattern)) <NEW_LINE> return data.getvalue() | This object is a constructor of the base type :obj:`~pyrogram.raw.base.auth.SentCodeType`.
Details:
- Layer: ``122``
- ID: ``0xab03c6d9``
Parameters:
pattern: ``str`` | 6259908266673b3332c31ef3 |
class DummyRoute(APIView): <NEW_LINE> <INDENT> permission_classes = (AllowAny,) <NEW_LINE> def get(self, request, route_id): <NEW_LINE> <INDENT> return Response(DUMMY_ROUTE_DATA) | A mock used for testing. Returns the dummy data in the format expected by the front end. | 6259908297e22403b383c9f3 |
class SpamDetector(object): <NEW_LINE> <INDENT> def clean(self, s): <NEW_LINE> <INDENT> translator = str.maketrans("", "", string.punctuation) <NEW_LINE> return s.translate(translator) <NEW_LINE> <DEDENT> def tokenize(self, text): <NEW_LINE> <INDENT> text = self.clean(text).lower() <NEW_LINE> return re.split("\W+", text) <NEW_LINE> <DEDENT> def get_word_counts(self, words): <NEW_LINE> <INDENT> word_counts = {} <NEW_LINE> for word in words: <NEW_LINE> <INDENT> word_counts[word] = word_counts.get(word, 0.0) + 1.0 <NEW_LINE> <DEDENT> return word_counts <NEW_LINE> <DEDENT> def fit(self, X, Y): <NEW_LINE> <INDENT> self.num_messages = {} <NEW_LINE> self.log_class_priors = {} <NEW_LINE> self.word_counts = {} <NEW_LINE> self.vocab = set() <NEW_LINE> n = len(X) <NEW_LINE> self.num_messages['spam'] = sum(1 for label in Y if label == 1) <NEW_LINE> self.num_messages['ham'] = sum(1 for label in Y if label == 0) <NEW_LINE> self.log_class_priors['spam'] = math.log(self.num_messages['spam'] / n) <NEW_LINE> self.log_class_priors['ham'] = math.log(self.num_messages['ham'] / n) <NEW_LINE> self.word_counts['spam'] = {} <NEW_LINE> self.word_counts['ham'] = {} <NEW_LINE> for x, y in zip(X, Y): <NEW_LINE> <INDENT> c = 'spam' if y == 1 else 'ham' <NEW_LINE> counts = self.get_word_counts(self.tokenize(x)) <NEW_LINE> for word, count in counts.items(): <NEW_LINE> <INDENT> if word not in self.vocab: <NEW_LINE> <INDENT> self.vocab.add(word) <NEW_LINE> <DEDENT> if word not in self.word_counts[c]: <NEW_LINE> <INDENT> self.word_counts[c][word] = 0.0 <NEW_LINE> <DEDENT> self.word_counts[c][word] += count <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def predict(self, X): <NEW_LINE> <INDENT> result = [] <NEW_LINE> for x in X: <NEW_LINE> <INDENT> counts = self.get_word_counts(self.tokenize(x)) <NEW_LINE> spam_score = 0 <NEW_LINE> ham_score = 0 <NEW_LINE> for word, _ in counts.items(): <NEW_LINE> <INDENT> if word not in self.vocab: continue <NEW_LINE> log_w_given_spam = math.log( (self.word_counts['spam'].get(word, 0.0) + 1) / (self.num_messages['spam'] + len(self.vocab)) ) <NEW_LINE> log_w_given_ham = math.log( (self.word_counts['ham'].get(word, 0.0) + 1) / (self.num_messages['ham'] + len(self.vocab)) ) <NEW_LINE> spam_score += log_w_given_spam <NEW_LINE> ham_score += log_w_given_ham <NEW_LINE> <DEDENT> spam_score += self.log_class_priors['spam'] <NEW_LINE> ham_score += self.log_class_priors['ham'] <NEW_LINE> if spam_score > ham_score: <NEW_LINE> <INDENT> result.append(1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result.append(0) <NEW_LINE> <DEDENT> <DEDENT> return result | Implementation of Naive Bayes for binary classification | 6259908260cbc95b06365ae6 |
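
An illustrative fit/predict sketch for the classifier above; the messages and labels are invented, and the module-level imports (re, string, math) are assumed to be present:

train_texts = [
    "win a free prize now",
    "limited offer claim your prize",
    "are we still meeting for lunch",
    "see you at the office tomorrow",
]
train_labels = [1, 1, 0, 0]   # 1 = spam, 0 = ham

detector = SpamDetector()
detector.fit(train_texts, train_labels)
print(detector.predict(["free prize offer"]))     # expected: [1]
print(detector.predict(["lunch at the office"]))  # expected: [0]
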
class MqttDiscoveryUpdate(Entity): <NEW_LINE> <INDENT> def __init__(self, discovery_data, discovery_update=None) -> None: <NEW_LINE> <INDENT> self._discovery_data = discovery_data <NEW_LINE> self._discovery_update = discovery_update <NEW_LINE> self._remove_signal = None <NEW_LINE> self._removed_from_hass = False <NEW_LINE> <DEDENT> async def async_added_to_hass(self) -> None: <NEW_LINE> <INDENT> await super().async_added_to_hass() <NEW_LINE> self._removed_from_hass = False <NEW_LINE> discovery_hash = ( self._discovery_data[ATTR_DISCOVERY_HASH] if self._discovery_data else None ) <NEW_LINE> async def _async_remove_state_and_registry_entry(self) -> None: <NEW_LINE> <INDENT> entity_registry = ( await self.hass.helpers.entity_registry.async_get_registry() ) <NEW_LINE> if entity_registry.async_is_registered(self.entity_id): <NEW_LINE> <INDENT> entity_entry = entity_registry.async_get(self.entity_id) <NEW_LINE> entity_registry.async_remove(self.entity_id) <NEW_LINE> await cleanup_device_registry(self.hass, entity_entry.device_id) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> await self.async_remove() <NEW_LINE> <DEDENT> <DEDENT> @callback <NEW_LINE> async def discovery_callback(payload): <NEW_LINE> <INDENT> _LOGGER.info( "Got update for entity with hash: %s '%s'", discovery_hash, payload, ) <NEW_LINE> debug_info.update_entity_discovery_data(self.hass, payload, self.entity_id) <NEW_LINE> if not payload: <NEW_LINE> <INDENT> _LOGGER.info("Removing component: %s", self.entity_id) <NEW_LINE> self._cleanup_discovery_on_remove() <NEW_LINE> await _async_remove_state_and_registry_entry(self) <NEW_LINE> <DEDENT> elif self._discovery_update: <NEW_LINE> <INDENT> _LOGGER.info("Updating component: %s", self.entity_id) <NEW_LINE> await self._discovery_update(payload) <NEW_LINE> <DEDENT> <DEDENT> if discovery_hash: <NEW_LINE> <INDENT> debug_info.add_entity_discovery_data( self.hass, self._discovery_data, self.entity_id ) <NEW_LINE> set_discovery_hash(self.hass, discovery_hash) <NEW_LINE> self._remove_signal = async_dispatcher_connect( self.hass, MQTT_DISCOVERY_UPDATED.format(discovery_hash), discovery_callback, ) <NEW_LINE> <DEDENT> <DEDENT> async def async_removed_from_registry(self) -> None: <NEW_LINE> <INDENT> if not self._removed_from_hass: <NEW_LINE> <INDENT> discovery_topic = self._discovery_data[ATTR_DISCOVERY_TOPIC] <NEW_LINE> publish( self.hass, discovery_topic, "", retain=True, ) <NEW_LINE> <DEDENT> <DEDENT> async def async_will_remove_from_hass(self) -> None: <NEW_LINE> <INDENT> self._cleanup_discovery_on_remove() <NEW_LINE> <DEDENT> def _cleanup_discovery_on_remove(self) -> None: <NEW_LINE> <INDENT> if self._discovery_data and not self._removed_from_hass: <NEW_LINE> <INDENT> debug_info.remove_entity_data(self.hass, self.entity_id) <NEW_LINE> clear_discovery_hash(self.hass, self._discovery_data[ATTR_DISCOVERY_HASH]) <NEW_LINE> self._removed_from_hass = True <NEW_LINE> <DEDENT> if self._remove_signal: <NEW_LINE> <INDENT> self._remove_signal() <NEW_LINE> self._remove_signal = None | Mixin used to handle updated discovery message. | 625990823617ad0b5ee07c44 |
class Poll(models.Model): <NEW_LINE> <INDENT> objects = PollManager() <NEW_LINE> title = models.CharField(max_length=100) <NEW_LINE> description = models.CharField(max_length=500) <NEW_LINE> start_time = models.DateTimeField() <NEW_LINE> end_time = models.DateTimeField() <NEW_LINE> visible = models.BooleanField(default=False) <NEW_LINE> is_secret = models.BooleanField(default=False) <NEW_LINE> groups = models.ManyToManyField(DjangoGroup, blank=True) <NEW_LINE> def before_end_time(self): <NEW_LINE> <INDENT> now = timezone.now() <NEW_LINE> return now < self.end_time <NEW_LINE> <DEDENT> def before_start_time(self): <NEW_LINE> <INDENT> now = timezone.now() <NEW_LINE> return now < self.start_time <NEW_LINE> <DEDENT> def in_time_range(self): <NEW_LINE> <INDENT> return not self.before_start_time() and self.before_end_time() <NEW_LINE> <DEDENT> def get_users_voted(self): <NEW_LINE> <INDENT> users = [] <NEW_LINE> for q in self.question_set.all(): <NEW_LINE> <INDENT> if users: <NEW_LINE> <INDENT> users = list(set(q.get_users_voted()) | set(users)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> users = list(q.get_users_voted()) <NEW_LINE> <DEDENT> <DEDENT> return users <NEW_LINE> <DEDENT> def get_num_eligible_voters(self): <NEW_LINE> <INDENT> if self.groups.exists(): <NEW_LINE> <INDENT> return get_user_model().objects.exclude(user_type="service").filter(groups__poll=self).distinct().count() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return get_user_model().objects.exclude(user_type="service").count() <NEW_LINE> <DEDENT> <DEDENT> def get_percentage_voted(self, voted, able): <NEW_LINE> <INDENT> return "{:.1%}".format(0 if able == 0 else voted / able) <NEW_LINE> <DEDENT> def get_voted_string(self): <NEW_LINE> <INDENT> users_voted = len(self.get_users_voted()) <NEW_LINE> users_able = self.get_num_eligible_voters() <NEW_LINE> percent = self.get_percentage_voted(users_voted, users_able) <NEW_LINE> return "{} out of {} ({}) eligible users voted in this poll.".format(users_voted, users_able, percent) <NEW_LINE> <DEDENT> def has_user_voted(self, user): <NEW_LINE> <INDENT> return Answer.objects.filter(question__in=self.question_set.all(), user=user).count() == self.question_set.count() <NEW_LINE> <DEDENT> def can_vote(self, user): <NEW_LINE> <INDENT> if user.has_admin_permission("polls"): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if not self.visible: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if not self.in_time_range(): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if not self.groups.exists(): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return user.groups.intersection(self.groups.all()).exists() <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.title | A Poll, for the TJ community.
Attributes:
title
A title for the poll, that will be displayed to identify it uniquely.
description
A longer description, possibly explaining how to complete the poll.
start_time
A time that the poll should open.
end_time
A time that the poll should close.
visible
Whether the poll is visible to the users it is for.
is_secret
Whether the poll is a 'secret' poll. Poll admins will not be able to view individual
user responses for secret polls.
groups
The Groups that can view--and vote in--the poll. Like Announcements,
if there are none set, then it is public to all.
Access questions for the poll through poll.question_set.all() | 625990823317a56b869bf2be |
class BaseTaskSet(TaskSet): <NEW_LINE> <INDENT> headers = { 'Content-Type': 'application/json' } <NEW_LINE> token = None <NEW_LINE> token_url = None <NEW_LINE> token_title = 'JWT' <NEW_LINE> def unpack_values(self, payload_data): <NEW_LINE> <INDENT> if isinstance(payload_data, dict) is True: <NEW_LINE> <INDENT> for key, val in deepcopy(payload_data).items(): <NEW_LINE> <INDENT> if key == '__compile__': <NEW_LINE> <INDENT> payload_data.update(self.unpack_values(val)) <NEW_LINE> del payload_data['__compile__'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> payload_data.update({key: self.unpack_values(val)}) <NEW_LINE> <DEDENT> <DEDENT> return payload_data <NEW_LINE> <DEDENT> if hasattr(payload_data, '__call__') is True: <NEW_LINE> <INDENT> return payload_data() <NEW_LINE> <DEDENT> return payload_data <NEW_LINE> <DEDENT> def compile_resource(self, resource, params): <NEW_LINE> <INDENT> rtemplate = Environment(loader=BaseLoader).from_string(resource) <NEW_LINE> _params = deepcopy(params) <NEW_LINE> _params.update(self.unpack_values(_params)) <NEW_LINE> return rtemplate.render(**_params) <NEW_LINE> <DEDENT> def get_headers(self): <NEW_LINE> <INDENT> return { 'Content-Type': 'application/json', 'Authorization': '{} {}'.format(self.token_title, self.token) } <NEW_LINE> <DEDENT> def on_start(self): <NEW_LINE> <INDENT> credentials = None <NEW_LINE> if hasattr(self, 'get_credentials'): <NEW_LINE> <INDENT> credentials = self.get_credentials() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> response = self.client.post( self.token_url, data=credentials, headers=self.headers ) <NEW_LINE> if response.status_code == 200: <NEW_LINE> <INDENT> self.token = response.json().get('token', None) | - Getting token
- Unpack data
- compile resource url | 625990825fdd1c0f98e5fa74 |
class ConnectionDetail(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'id': {'readonly': True}, 'private_ip_address': {'readonly': True}, 'link_identifier': {'readonly': True}, 'group_id': {'readonly': True}, 'member_name': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'private_ip_address': {'key': 'privateIpAddress', 'type': 'str'}, 'link_identifier': {'key': 'linkIdentifier', 'type': 'str'}, 'group_id': {'key': 'groupId', 'type': 'str'}, 'member_name': {'key': 'memberName', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(ConnectionDetail, self).__init__(**kwargs) <NEW_LINE> self.id = None <NEW_LINE> self.private_ip_address = None <NEW_LINE> self.link_identifier = None <NEW_LINE> self.group_id = None <NEW_LINE> self.member_name = None | ConnectionDetail.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar id: Azure resource Id.
:vartype id: str
:ivar private_ip_address: The private endpoint connection private ip address.
:vartype private_ip_address: str
:ivar link_identifier: The private endpoint connection link identifier.
:vartype link_identifier: str
:ivar group_id: The private endpoint connection group id.
:vartype group_id: str
:ivar member_name: The private endpoint connection member name.
:vartype member_name: str | 6259908297e22403b383c9f4 |
class ClaseTest(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.tests = [] <NEW_LINE> <DEDENT> def completar(self): <NEW_LINE> <INDENT> self.exitosos = len(filter(lambda t: t.exitoso, self.tests)) <NEW_LINE> self.exitoso = len(self.tests) == self.exitosos <NEW_LINE> self.duracion = 0 <NEW_LINE> if len(self.tests): <NEW_LINE> <INDENT> self.duracion = reduce( lambda t1, t2: t1 + t2, [t.duracion for t in self.tests]) | Model of the test class | 62599082aad79263cf4302b0
class FacebookGraphMixin(OAuth2Mixin): <NEW_LINE> <INDENT> _OAUTH_ACCESS_TOKEN_URL = "https://graph.facebook.com/oauth/access_token?" <NEW_LINE> _OAUTH_AUTHORIZE_URL = "https://graph.facebook.com/oauth/authorize?" <NEW_LINE> _OAUTH_NO_CALLBACKS = False <NEW_LINE> @_auth_return_future <NEW_LINE> def get_authenticated_user(self, redirect_uri, client_id, client_secret, code, callback, extra_fields=None): <NEW_LINE> <INDENT> http = self.get_auth_http_client() <NEW_LINE> args = { "redirect_uri": redirect_uri, "code": code, "client_id": client_id, "client_secret": client_secret, } <NEW_LINE> fields = set(['id', 'name', 'first_name', 'last_name', 'locale', 'picture', 'link']) <NEW_LINE> if extra_fields: <NEW_LINE> <INDENT> fields.update(extra_fields) <NEW_LINE> <DEDENT> http.fetch(self._oauth_request_token_url(**args), self.async_callback(self._on_access_token, redirect_uri, client_id, client_secret, callback, fields)) <NEW_LINE> <DEDENT> def _on_access_token(self, redirect_uri, client_id, client_secret, future, fields, response): <NEW_LINE> <INDENT> if response.error: <NEW_LINE> <INDENT> future.set_exception(AuthError('Facebook auth error: %s' % str(response))) <NEW_LINE> return <NEW_LINE> <DEDENT> args = escape.parse_qs_bytes(escape.native_str(response.body)) <NEW_LINE> session = { "access_token": args["access_token"][-1], "expires": args.get("expires") } <NEW_LINE> self.facebook_request( path="/me", callback=self.async_callback( self._on_get_user_info, future, session, fields), access_token=session["access_token"], fields=",".join(fields) ) <NEW_LINE> <DEDENT> def _on_get_user_info(self, future, session, fields, user): <NEW_LINE> <INDENT> if user is None: <NEW_LINE> <INDENT> future.set_result(None) <NEW_LINE> return <NEW_LINE> <DEDENT> fieldmap = {} <NEW_LINE> for field in fields: <NEW_LINE> <INDENT> fieldmap[field] = user.get(field) <NEW_LINE> <DEDENT> fieldmap.update({"access_token": session["access_token"], "session_expires": session.get("expires")}) <NEW_LINE> future.set_result(fieldmap) <NEW_LINE> <DEDENT> @_auth_return_future <NEW_LINE> def facebook_request(self, path, callback, access_token=None, post_args=None, **args): <NEW_LINE> <INDENT> url = "https://graph.facebook.com" + path <NEW_LINE> all_args = {} <NEW_LINE> if access_token: <NEW_LINE> <INDENT> all_args["access_token"] = access_token <NEW_LINE> all_args.update(args) <NEW_LINE> <DEDENT> if all_args: <NEW_LINE> <INDENT> url += "?" + urllib_parse.urlencode(all_args) <NEW_LINE> <DEDENT> callback = self.async_callback(self._on_facebook_request, callback) <NEW_LINE> http = self.get_auth_http_client() <NEW_LINE> if post_args is not None: <NEW_LINE> <INDENT> http.fetch(url, method="POST", body=urllib_parse.urlencode(post_args), callback=callback) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> http.fetch(url, callback=callback) <NEW_LINE> <DEDENT> <DEDENT> def _on_facebook_request(self, future, response): <NEW_LINE> <INDENT> if response.error: <NEW_LINE> <INDENT> future.set_exception(AuthError("Error response %s fetching %s" % (response.error, response.request.url))) <NEW_LINE> return <NEW_LINE> <DEDENT> future.set_result(escape.json_decode(response.body)) <NEW_LINE> <DEDENT> def get_auth_http_client(self): <NEW_LINE> <INDENT> return httpclient.AsyncHTTPClient() | Facebook authentication using the new Graph API and OAuth2. | 625990822c8b7c6e89bd52db |
class TestCliInstall(Base.TestNewMonorepoGitInit): <NEW_LINE> <INDENT> basic_cmd = ["scream", "install", "com_packagea"] <NEW_LINE> test_cmd = ["scream", "install", "com_packagea", "--test"] <NEW_LINE> test_cmd_short = ["scream", "install", "com_packagea", "-t"] <NEW_LINE> install_cmds = [basic_cmd, test_cmd, test_cmd_short ] <NEW_LINE> def test_install_no_packages_created(self): <NEW_LINE> <INDENT> for cmd in self.install_cmds: <NEW_LINE> <INDENT> with chdir(self.TMP_DIR): <NEW_LINE> <INDENT> with mock.patch.object(sys, "argv", cmd): <NEW_LINE> <INDENT> with self.assertRaises(SystemExit) as err: <NEW_LINE> <INDENT> scream.Scream() <NEW_LINE> <DEDENT> self.assertEqual(err.exception.code, 1) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def test_install_with_packages_created(self): <NEW_LINE> <INDENT> new_cmd = ["scream", "new", "com.packagea"] <NEW_LINE> with chdir(self.TMP_DIR): <NEW_LINE> <INDENT> with mock.patch.object(sys, "argv", new_cmd): <NEW_LINE> <INDENT> with self.assertRaises(SystemExit) as err: <NEW_LINE> <INDENT> scream.Scream() <NEW_LINE> <DEDENT> self.assertEqual(err.exception.code, 0) <NEW_LINE> <DEDENT> subprocess.call(["git", "add", "."]) <NEW_LINE> <DEDENT> for cmd in self.install_cmds: <NEW_LINE> <INDENT> with chdir(self.TMP_DIR): <NEW_LINE> <INDENT> with mock.patch.object(sys, "argv", cmd): <NEW_LINE> <INDENT> with self.assertRaises(SystemExit) as err: <NEW_LINE> <INDENT> scream.Scream() <NEW_LINE> <DEDENT> self.assertEqual(err.exception.code, 0) | Make sure all `scream install` commands run, with or without any packages existing.
| 6259908292d797404e3898d7 |
class SessionForm(messages.Message): <NEW_LINE> <INDENT> name = messages.StringField(1) <NEW_LINE> highlight = messages.StringField(2) <NEW_LINE> speakerKey = messages.StringField(3) <NEW_LINE> duration = messages.IntegerField(4) <NEW_LINE> sessionType = messages.StringField(5) <NEW_LINE> date = messages.StringField(6) <NEW_LINE> startTime = messages.StringField(7) <NEW_LINE> location = messages.StringField(8) | SessionForm -- Single session input form | 62599082656771135c48adab |
class Neo4jKarmabotDatabaseService(KarmabotDatabaseService): <NEW_LINE> <INDENT> pass | Handles connections to neo4j | 625990827cff6e4e811b7537
class MscaleV3Plus(MscaleBase): <NEW_LINE> <INDENT> def __init__(self, num_classes, trunk='wrn38', criterion=None): <NEW_LINE> <INDENT> super(MscaleV3Plus, self).__init__() <NEW_LINE> self.criterion = criterion <NEW_LINE> self.backbone, s2_ch, _s4_ch, high_level_ch = get_trunk(trunk) <NEW_LINE> self.aspp, aspp_out_ch = get_aspp(high_level_ch, bottleneck_ch=256, output_stride=8) <NEW_LINE> self.bot_fine = nn.Conv2d(s2_ch, 48, kernel_size=1, bias=False) <NEW_LINE> self.bot_aspp = nn.Conv2d(aspp_out_ch, 256, kernel_size=1, bias=False) <NEW_LINE> self.final = nn.Sequential( nn.Conv2d(256 + 48, 256, kernel_size=3, padding=1, bias=False), Norm2d(256), nn.ReLU(inplace=True), nn.Conv2d(256, 256, kernel_size=3, padding=1, bias=False), Norm2d(256), nn.ReLU(inplace=True), nn.Conv2d(256, num_classes, kernel_size=1, bias=False)) <NEW_LINE> scale_in_ch = 2 * (256 + 48) <NEW_LINE> self.scale_attn = nn.Sequential( nn.Conv2d(scale_in_ch, 256, kernel_size=3, padding=1, bias=False), Norm2d(256), nn.ReLU(inplace=True), nn.Conv2d(256, 256, kernel_size=3, padding=1, bias=False), Norm2d(256), nn.ReLU(inplace=True), nn.Conv2d(256, 1, kernel_size=1, bias=False), nn.Sigmoid()) <NEW_LINE> if cfg.OPTIONS.INIT_DECODER: <NEW_LINE> <INDENT> initialize_weights(self.bot_fine) <NEW_LINE> initialize_weights(self.bot_aspp) <NEW_LINE> initialize_weights(self.scale_attn) <NEW_LINE> initialize_weights(self.final) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> initialize_weights(self.final) <NEW_LINE> <DEDENT> <DEDENT> def _fwd(self, x): <NEW_LINE> <INDENT> x_size = x.size() <NEW_LINE> s2_features, _, final_features = self.backbone(x) <NEW_LINE> aspp = self.aspp(final_features) <NEW_LINE> conv_aspp = self.bot_aspp(aspp) <NEW_LINE> conv_s2 = self.bot_fine(s2_features) <NEW_LINE> conv_aspp = Upsample(conv_aspp, s2_features.size()[2:]) <NEW_LINE> cat_s4 = [conv_s2, conv_aspp] <NEW_LINE> cat_s4 = torch.cat(cat_s4, 1) <NEW_LINE> final = self.final(cat_s4) <NEW_LINE> out = Upsample(final, x_size[2:]) <NEW_LINE> return out, cat_s4 | DeepLabV3Plus-based mscale segmentation model | 625990824f6381625f19a22a |
class TestIoK8sApiCoreV1LoadBalancerIngress(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testIoK8sApiCoreV1LoadBalancerIngress(self): <NEW_LINE> <INDENT> pass | IoK8sApiCoreV1LoadBalancerIngress unit test stubs | 62599082f9cc0f698b1c6047 |
class SourceMeta(type): <NEW_LINE> <INDENT> def __new__(self, name, bases, dct): <NEW_LINE> <INDENT> if all([not '_read' in dct, name != 'Source', not name.endswith('Mixin')]): <NEW_LINE> <INDENT> msg = '%s is missing the required "_read" method' % name <NEW_LINE> raise NotImplementedError(msg) <NEW_LINE> <DEDENT> dct['_meta'] = MetaInfo( readonly='_write' not in dct, source_name=name, is_typed=dct.get('_is_typed', True) ) <NEW_LINE> return super(SourceMeta, self).__new__(self, name, bases, dct) <NEW_LINE> <DEDENT> def __call__(cls, *args, **kwargs): <NEW_LINE> <INDENT> instance = super(SourceMeta, cls).__call__(*args, **kwargs) <NEW_LINE> instance._initialized = True <NEW_LINE> return instance | Initialize subclasses and source base class | 6259908260cbc95b06365ae7 |
class DummyModule(AbstractModule): <NEW_LINE> <INDENT> def update(self, data): <NEW_LINE> <INDENT> pass | This class waits for button 1 and then plays the walkready animation | 625990827047854f46340eab
class TestApiFactory(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self) -> None: <NEW_LINE> <INDENT> print("testing ApiFactory Class...") <NEW_LINE> self.api_data_1 = ApiFactory.create_api_client(ApiEnum.api_data_il) <NEW_LINE> self.api_data_2 = ApiFactory.create_api_client(ApiEnum.api_data_il) <NEW_LINE> self.api_data_3 = ApiFactory.create_api_client(ApiEnum.api_data_il) <NEW_LINE> <DEDENT> def tearDown(self) -> None: <NEW_LINE> <INDENT> print("finished testing ApiFactory Class...") <NEW_LINE> <DEDENT> def test_create_api_client(self) -> None: <NEW_LINE> <INDENT> self.assertIsInstance(self.api_data_1, ApiDataIL) <NEW_LINE> self.assertIsInstance(self.api_data_2, ApiDataIL) <NEW_LINE> self.assertIsInstance(self.api_data_3, ApiDataIL) <NEW_LINE> self.assertEqual(id(self.api_data_1), id(self.api_data_2), id(self.api_data_3)) <NEW_LINE> lru_cache_info = ApiFactory.create_api_client.cache_info() <NEW_LINE> self.assertEqual(lru_cache_info.hits, 2) <NEW_LINE> self.assertEqual(lru_cache_info.misses, 1) <NEW_LINE> with self.assertRaises(TypeError): <NEW_LINE> <INDENT> self.api_data_1 = ApiFactory.create_api_client(1) | API Factory for creating Types of API Clients.
Methods:
def setUp(self): announces the start of the class's tests and initializes API client instances
def tearDown(self): announces the end of the class's tests
def test_create_api_client(self): tests API client instance creation and lru_cache's singleton-like behaviour. | 625990824c3428357761bdb1
class dlgDatabaseConfig( QDialog ): <NEW_LINE> <INDENT> u <NEW_LINE> def __init__( self, parent = None ): <NEW_LINE> <INDENT> super( dlgDatabaseConfig, self ).__init__( parent ) <NEW_LINE> self.setupUi() <NEW_LINE> self.buttonBox.accepted.connect( self.accept ) <NEW_LINE> self.buttonBox.rejected.connect( self.reject ) <NEW_LINE> <DEDENT> def setupUi( self ): <NEW_LINE> <INDENT> self.txtServer = QLineEdit() <NEW_LINE> self.txtDatabase = QLineEdit() <NEW_LINE> self.txtUser = QLineEdit() <NEW_LINE> self.txtPassword = QLineEdit() <NEW_LINE> self.txtPassword.setEchoMode( QLineEdit.Password ) <NEW_LINE> self.txtReports = QLineEdit() <NEW_LINE> formLayout = QFormLayout() <NEW_LINE> formLayout.addRow( "&Servidor", self.txtServer ) <NEW_LINE> formLayout.addRow( "Base de &Datos", self.txtDatabase ) <NEW_LINE> formLayout.addRow( "&Usuario", self.txtUser ) <NEW_LINE> formLayout.addRow( u"&Contraseña", self.txtPassword ) <NEW_LINE> formLayout.addRow( u"Servidor de &Reportes", self.txtReports ) <NEW_LINE> self.buttonBox = QDialogButtonBox( QDialogButtonBox.Ok | QDialogButtonBox.Cancel ) <NEW_LINE> verticalLayout = QVBoxLayout() <NEW_LINE> verticalLayout.addLayout( formLayout ) <NEW_LINE> verticalLayout.addWidget( self.buttonBox ) <NEW_LINE> self.setLayout( verticalLayout ) | Dialog used to ask the user for new configuration values | 6259908299fddb7c1ca63b55
class RemoteTorrent(EventEmitter, BareRemoteTorrent): <NEW_LINE> <INDENT> pass | A proxy to the Torrent object in the WebTorrent server. | 62599082be7bc26dc9252bd1