code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class Core(containers.DeclarativeContainer):
    """IoC container of core component providers."""

    # Application-wide configuration provider.
    config = providers.Configuration('config')
    # Shared publisher instance for cross-domain events.
    inter_domain_events_publisher = providers.Singleton(EventsPublisher, 'InterDomain')
    # Shared router instance for dispatching commands.
    command_router = providers.Singleton(CommandRouter)
IoC container of core component providers.
6259906baad79263cf42ffcb
class InvalidBodyError(HTTPError):
    """An attempt was made to send a request with a body object that Hip
    does not support."""
An attempt was made to send a request with a body object that Hip does not support.
6259906ba8370b77170f1bdc
class BaseDependency(object):
    """Container encapsulating some dependency.

    Attributes:
        class_or_interface: The class or interface this dependency resolves to.
        required: Whether the dependency must be supplied.
        default: Fallback value used when the dependency is not supplied.
    """

    def __init__(self, class_or_interface=None, required=True, default=None):
        self.class_or_interface = class_or_interface
        self.required = required
        self.default = default

    @classmethod
    def bound_instance(cls, value):
        """Return *value* unchanged; hook point for subclasses to wrap bound values."""
        # Fix: a classmethod receives the class as its first argument, so the
        # parameter is named `cls`, not `self` (call behavior is unchanged).
        return value
Container encapsulating some dependency
6259906b7d847024c075dbf1
class VariableCollectionItem(Model):
    """A single item of a variable collection.

    NOTE: This class is auto generated by OpenAPI Generator
    (https://openapi-generator.tech). Do not edit the class manually.
    """

    def __init__(self, key=None):
        # OpenAPI type map and JSON attribute map used by the serializer.
        self.openapi_types = {'key': str}
        self.attribute_map = {'key': 'key'}
        self._key = key

    @classmethod
    def from_dict(cls, dikt) -> 'VariableCollectionItem':
        """Build an instance from a plain dict."""
        return util.deserialize_model(dikt, cls)

    @property
    def key(self):
        """The item's key."""
        return self._key

    @key.setter
    def key(self, key):
        self._key = key
NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). Do not edit the class manually.
6259906b442bda511e95d963
class BackboneNtoAspOD(bb2YHBond):
    """Backbone nitrogen (donor) to Asp oxygen."""

    def __init__(self, model, rcutoff=3.2, *args, **kwds):
        # Target the two Asp carboxylate oxygens; donor and acceptor must
        # come from different residues (sameres=False).
        bb2YHBond.__init__(
            self, model,
            rats=['ASPOD1', 'ASPOD2'],
            sameres=False,
            namekl=get_asp_names,
            rcutoff=rcutoff,
            *args, **kwds)
Backbone nitrogen (donor) to Asp oxygen
6259906b8da39b475be04a02
class Bundles(object):
    """Container class to hold test bundle specifications."""

    phrase_path = 'testprojects/src/java/org/pantsbuild/testproject/phrases'

    class Bundle(object):
        """A single test bundle: a target spec plus its expected text."""

        def __init__(self, spec, text):
            self.spec = spec
            self.text = text

        def __eq__(self, other):
            # Fix: __hash__ was defined without __eq__, so two bundles with
            # the same hash never compared equal; define both consistently.
            if not isinstance(other, Bundles.Bundle):
                return NotImplemented
            return (self.spec, self.text) == (other.spec, other.text)

        def __hash__(self):
            return hash((self.spec, self.text))

        @property
        def full_spec(self):
            """The fully-qualified target spec, e.g. 'path/to/project:name'."""
            return '{project}:{name}'.format(project=Bundles.phrase_path, name=self.spec)

    lesser_of_two = Bundle('lesser-of-two', "One must choose the lesser of two eval()s.")
    once_upon_a_time = Bundle('once-upon-a-time',
                              "Once upon a time, in a far away land, there were some pants.")
    ten_thousand = Bundle('ten-thousand',
                          "And now you must face my army of ten thousand BUILD files.")
    there_was_a_duck = Bundle('there-was-a-duck', "And also, there was a duck.")

    all_bundles = [lesser_of_two, once_upon_a_time, ten_thousand, there_was_a_duck]
Container class to hold test bundle specifications.
6259906b5166f23b2e244be9
class AGAHistoricalGamesLoader(Command):
    """Class which holds a little bit of state used while loading the AGAGD data."""

    # Command-line options: paths to the AGAGD SQL dump and the pin-change dump.
    option_list = (
        Option('--sql_dump', '-d', dest='agagd_dump_filename'),
        Option('--pin_changes', '-p', dest='pin_change_dump_filename')
    )

    def setup(self, pin_change_dump_filename):
        """Find or create the AGA GoServer row and load the pin-change map."""
        name = 'AGA'
        server = db.session.query(GoServer).filter_by(name=name).first()
        if server:
            self.server_id = server.id
        else:
            print('Creating AGA Server object')
            self.server_id = create_server(name)
        # Cache of users created so far, keyed by (current) AGA id.
        self._users = {}
        # Map of old pin -> new pin; identity renames are dropped.
        with open(pin_change_dump_filename) as f:
            self._pin_changes = {line['old']: line['new'] for line in pin_change_parser(f) if line['old'] != line['new']}

    def get_or_make_user(self, aga_id):
        """Return the User for *aga_id*, following pin renames; create one if missing."""
        # Follow chains of pin renames to the most recent id.
        while aga_id in self._pin_changes:
            aga_id = self._pin_changes[aga_id]
        if aga_id in self._users:
            return self._users[aga_id]
        else:
            # Placeholder user/player pair; email and token are random UUIDs.
            user = User(aga_id=aga_id, email=uuid4(), fake=True)
            db.session.add(user)
            db.session.commit()
            player = Player(id=aga_id, name='', user_id=user.id, server_id=self.server_id, token=uuid4())
            db.session.add(player)
            self._users[aga_id] = user
            return user

    def store_game(self, row):
        """Stage one Game row built from a parsed AGAGD record."""
        user1 = self.get_or_make_user(row['Pin_Player_1'])
        user2 = self.get_or_make_user(row['Pin_Player_2'])
        # Color_1 says which player took white.
        white_user, black_user = (user1, user2) if row['Color_1'] == 'W' else (user2, user1)
        game = Game(id=row['Game_ID'], server_id=self.server_id, white_id=white_user.aga_id, black_id=black_user.aga_id, date_played=row['Game_Date'], date_reported=row['Game_Date'], result=row['Result'], rated=row['Rated'], handicap=row['Handicap'], komi=row['Komi'])
        db.session.add(game)

    def load_data(self, filename):
        """Stream the AGAGD dump, committing every 1000 rows."""
        with open(filename) as f:
            for i, row in enumerate(agagd_parser(f)):
                if i % 1000 == 0:
                    print('-Loading row', i)
                    db.session.commit()
                    print('Committed', i)
                self.store_game(row)

    def run(self, agagd_dump_filename, pin_change_dump_filename):
        """Entry point: load pin changes, then all games, then a final commit."""
        self.setup(pin_change_dump_filename)
        self.load_data(agagd_dump_filename)
        db.session.commit()
Class which holds a little bit of state used while loading the AGAGD data.
6259906b4e4d562566373c1d
class UserSerializer(serializers.ModelSerializer):
    """Serializer for User model."""

    class Meta:
        model = User
        # Only the public identity fields are exposed, and never writable.
        fields = ["id", "public_username"]
        read_only_fields = ["id", "public_username"]
Serializer for User model.
6259906b7b180e01f3e49c6f
class UserProfileManager(BaseUserManager):
    """Helps Django work with our custom user model."""

    def create_user(self, email, name, password=None):
        """Create and persist a regular user; email is mandatory."""
        if not email:
            raise ValueError('User must have an email address.')
        normalized = self.normalize_email(email)
        user = self.model(email=normalized, name=name)
        # set_password hashes the password rather than storing plaintext.
        user.set_password(password)
        user.save(using=self._db)
        return user

    def create_superuser(self, email, name, password):
        """Create a user and promote it to staff/superuser."""
        user = self.create_user(email, name, password)
        user.is_superuser = True
        user.is_staff = True
        user.save(using=self._db)
        return user
Helps Django work with our custom user model.
6259906b5fdd1c0f98e5f79c
class ExplicitTractInfo(TractInfo):
    """Information for a tract specified explicitly.

    A tract is placed at the explicitly defined coordinates, with the
    nominated radius. The tracts are square (i.e., the radius is really
    a half-size).
    """

    def __init__(self, ident, patchInnerDimensions, patchBorder, ctrCoord, radius, tractOverlap, wcs):
        # The vertex list is derived from the bounding box afterwards, so the
        # base class is handed an empty list here.
        vertexList = []
        self._radius = radius
        super(ExplicitTractInfo, self).__init__(ident, patchInnerDimensions, patchBorder, ctrCoord, vertexList, tractOverlap, wcs)
        # Sky coordinates of the corners of the final bounding box.
        self._vertexCoordList = [wcs.pixelToSky(afwGeom.Point2D(p)) for p in self.getBBox().getCorners()]

    def _minimumBoundingBox(self, wcs):
        """Return the minimal pixel box enclosing radius + overlap around the centre."""
        bbox = afwGeom.Box2D()
        # Walk out from the centre in the four cardinal directions.
        for i in range(4):
            coord = self._ctrCoord.clone()
            coord.offset(i * 90 * afwGeom.degrees, self._radius + self._tractOverlap)
            pixPos = wcs.skyToPixel(coord)
            bbox.include(pixPos)
        return bbox
Information for a tract specified explicitly. A tract is placed at the explicitly defined coordinates, with the nominated radius. The tracts are square (i.e., the radius is really a half-size).
6259906b99cbb53fe68326fe
class Proposal(BlockchainObject, SyncProposal):
    """Read data about a Proposal Balance in the chain.

    :param str id: Id of the proposal
    :param instance blockchain_instance: instance to use when accessing a RPC
    """

    async def __init__(self, data, *args, **kwargs):
        self.define_classes()
        # define_classes() must have bound an account class before we proceed.
        assert self.account_class
        await BlockchainObject.__init__(self, data, *args, **kwargs)

    async def refresh(self):
        """Reload this proposal's data from the chain; raise if it no longer exists."""
        proposal = await self.blockchain.rpc.get_objects([self.identifier])
        if not any(proposal):
            raise ProposalDoesNotExistException
        await super(Proposal, self).__init__(
            proposal[0], blockchain_instance=self.blockchain
        )

    @property
    async def proposer(self):
        """Return the proposer account, when the proposal carries a 'proposer' field."""
        if "proposer" in self:
            return await self.account_class(
                self["proposer"], blockchain_instance=self.blockchain
            )
Read data about a Proposal Balance in the chain :param str id: Id of the proposal :param instance blockchain_instance: instance to use when accessing an RPC
6259906bac7a0e7691f73cfe
class Recording(MBTreeElement):
    """A recording is a unique piece of recorded audio.

    Every track on a CD is associated to exactly one recording. Since we
    don't care about tracks, we immediately insert Recordings as children
    of media.
    """

    idTagName = 'musicbrainz_trackid'

    def __init__(self, recordingid, pos, parent, tracknumber):
        super().__init__(recordingid)
        parent.insertChild(pos, self)
        self.tracknumber = tracknumber
        # Filled in by lookupInfo() / mergeWork().
        self.parentWork = self.workid = None

    def lookupInfo(self):
        """Query MusicBrainz for this recording's tags and its associated work."""
        recording = query("recording", self.mbid, ("artist-rels", "work-rels", "artists") ).find("recording")
        for tag, value in tagsFromQuery(recording):
            self.tags.add(tag, value)
        for relation in recording.iterfind( 'relation-list[@target-type="work"]/relation[@type="performance"]'):
            # Only a single work relation is supported; warn and stop on extras.
            if self.workid:
                logging.warning(__name__, 'more than one work relation in {}'.format(self.mbid))
                break
            work = Work(relation.findtext('target'))
            date = relation.findtext('begin')
            if date:
                self.tags.add('date', date)
            work.lookupInfo()
            self.mergeWork(work)

    def mergeWork(self, work):
        """Fold *work*'s tags into this recording's tags and record its ids."""
        self.workid = work.mbid
        for tag, values in work.tags.items():
            if tag in self.tags:
                if tag == "title":
                    # The work's title replaces the recording's own title.
                    self.tags[tag] = values[:]
                else:
                    self.tags[tag].extend(values)
            else:
                self.tags[tag] = values
        self.parentWork = work.parentWork
A recording is a unique piece of recorded audio. Every track on a CD is associated to exactly one recording. Since we don't care about tracks, we immediately insert Recordings as children of media.
6259906ba8370b77170f1bdd
class PNGFormatter(BaseFormatter):
    """A PNG formatter.

    To define the callables that compute the PNG representation of your
    objects, define a :meth:`_repr_png_` method or use the :meth:`for_type`
    or :meth:`for_type_by_name` methods to register functions that handle
    this. The return value of this formatter should be raw PNG data, *not*
    base64 encoded.
    """

    # MIME type emitted by this formatter.
    format_type = Unicode('image/png')
    # Method looked up on objects to produce their PNG representation.
    print_method = ObjectName('_repr_png_')
    _return_type = (bytes, unicode_type)
A PNG formatter. To define the callables that compute the PNG representation of your objects, define a :meth:`_repr_png_` method or use the :meth:`for_type` or :meth:`for_type_by_name` methods to register functions that handle this. The return value of this formatter should be raw PNG data, *not* base64 encoded.
6259906badb09d7d5dc0bd81
class DecodingError(MongoException):
    """Exception thrown whenever the decoding of an object fails."""
Exception thrown whenever the decoding of an object fails
6259906b627d3e7fe0e0869f
class Survival_Problem_TestCase(unittest.TestCase):
    """Show that the values from CVXPY and our own implementation are the same."""

    def _test_value_calculation_size(self, theta_num_col):
        # Fixed seed so theta (and hence both objective values) is reproducible.
        np.random.seed(10)
        motif_len = 3
        penalty_param = 0.5
        feat_gen = HierarchicalMotifFeatureGenerator(motif_lens=[motif_len])
        motif_list = feat_gen.motif_list
        theta = np.random.rand(feat_gen.feature_vec_len, theta_num_col)
        # Mask out impossible motif -> target combinations.
        theta_mask = feat_gen.get_possible_motifs_to_targets(theta.shape)
        theta[~theta_mask] = -np.inf
        obs = ObservedSequenceMutations("aggtgggttac", "aggagagttac", motif_len)
        feat_gen.add_base_features(obs)
        sample = ImputedSequenceMutations(obs, obs.mutation_pos_dict.keys())
        # Reference implementation via CVXPY.
        problem_cvx = SurvivalProblemLassoCVXPY(feat_gen, [sample], penalty_param, theta_mask)
        ll_cvx = problem_cvx.calculate_per_sample_log_lik(theta, sample)
        value_cvx = problem_cvx.get_value(theta)
        # Our custom implementation; note it is handed exp(theta) and its own
        # precalculated per-sample data.
        feature_mut_steps = feat_gen.create_for_mutation_steps(sample)
        problem_custom = SurvivalProblemLasso(feat_gen, [sample], penalty_param, theta_mask)
        ll_custom = problem_custom.calculate_per_sample_log_lik(np.exp(theta), problem_custom.precalc_data[0])
        value_custom = problem_custom.get_value(theta)
        self.assertTrue(np.isclose(ll_cvx.value, ll_custom))
        # The custom problem's value is the negation of the CVXPY objective.
        self.assertTrue(np.isclose(value_cvx.value, -value_custom))

    def test_value_calculation_size_single(self):
        self._test_value_calculation_size(1)

    @unittest.skip("doesn't work right now")
    def test_value_calculation_size_per_target(self):
        self._test_value_calculation_size(NUM_NUCLEOTIDES)
Show that the values from CVXPY and our own implementation are the same
6259906b167d2b6e312b8199
class MatrixTransitionObject:
    """This is like a transition matrix.

    Matrix powers could use some caching eventually.
    """

    def __init__(self, T):
        self.T = T
        self.stationary_distribution = get_stationary_distribution(self.T)

    def get_nstates(self):
        """Number of states in the chain."""
        return len(self.stationary_distribution)

    def get_transition_probability(self, source, sink, distance=1):
        """Probability of moving from *source* to *sink* in *distance* steps."""
        if distance < 1:
            raise ValueError('expected a positive integer')
        powered = np.linalg.matrix_power(self.T, distance)
        return powered[source, sink]

    def get_stationary_probability(self, state):
        """Stationary probability of a single state."""
        return self.stationary_distribution[state]

    def get_stationary_distribution(self):
        """The full stationary distribution."""
        return self.stationary_distribution

    def get_ntransitions_expected(self, source, sink, distance):
        raise NotImplementedError()
This is like a transition matrix. Matrix powers could use some caching eventually.
6259906b71ff763f4b5e8fbe
class ManagedObject(sql.Base):
    """The abstract base class of the simplified KMIP object hierarchy.

    A ManagedObject is a core KMIP object that is the subject of key
    management operations. It contains various attributes that are common
    to all types of ManagedObjects, including keys, certificates, and
    various types of secret or sensitive data.

    For more information, see Section 2.2 of the KMIP 1.1 specification.

    Attributes:
        value: The value of the ManagedObject. Type varies, usually bytes.
        unique_identifier: The string ID of the ManagedObject.
        names: A list of names associated with the ManagedObject.
        object_type: An enumeration associated with the type of ManagedObject.
    """

    __tablename__ = 'managed_objects'

    # Primary key.
    unique_identifier = Column('uid', Integer, primary_key=True)
    _object_type = Column('object_type', sql.EnumType(enums.ObjectType))
    # Discriminator column for polymorphic inheritance (see __mapper_args__).
    _class_type = Column('class_type', String(50))
    value = Column('value', VARBINARY(1024))
    # Counter used when indexing names.
    name_index = Column(Integer, default=0)
    _names = sqlalchemy.orm.relationship(
        "ManagedObjectName",
        back_populates="mo",
        cascade="all, delete-orphan",
        order_by="ManagedObjectName.id"
    )
    # Convenience view over _names exposing just the name values.
    names = association_proxy('_names', 'name')
    operation_policy_name = Column(
        'operation_policy_name',
        String(50),
        default='default'
    )
    sensitive = Column("sensitive", Boolean, default=False)
    initial_date = Column(Integer, default=0)
    _owner = Column('owner', String(50), default=None)
    app_specific_info = sqlalchemy.orm.relationship(
        "ApplicationSpecificInformation",
        secondary=app_specific_info_map,
        back_populates="managed_objects",
        order_by="ApplicationSpecificInformation.id",
        passive_deletes=True
    )
    object_groups = sqlalchemy.orm.relationship(
        "ObjectGroup",
        secondary=object_group_map,
        back_populates="managed_objects",
        order_by="ObjectGroup.id",
        passive_deletes=True
    )
    __mapper_args__ = {
        'polymorphic_identity': 'ManagedObject',
        'polymorphic_on': _class_type
    }
    __table_args__ = {
        'sqlite_autoincrement': True
    }

    @abstractmethod
    def __init__(self):
        self.value = None
        self.unique_identifier = None
        self.name_index = 0
        self.names = list()
        self.operation_policy_name = None
        self.initial_date = 0
        self.sensitive = False
        self._object_type = None
        self._owner = None
        # Internal attributes not mapped to columns above.
        self._application_specific_informations = list()
        self._contact_information = None
        self._object_groups = list()
        self._archive_date = None
        self._last_change_date = None

    @property
    def object_type(self):
        """Accessor for the object type enumeration (read-only)."""
        return self._object_type

    @object_type.setter
    def object_type(self, value):
        # The object type is fixed by the concrete subclass; it may never be
        # reassigned after construction.
        raise AttributeError("object type cannot be set")

    @abstractmethod
    def validate(self):
        """Verify that the contents of the ManagedObject are valid."""
        pass

    @abstractmethod
    def __repr__(self):
        pass

    @abstractmethod
    def __str__(self):
        pass

    @abstractmethod
    def __eq__(self, other):
        pass

    @abstractmethod
    def __ne__(self, other):
        pass
The abstract base class of the simplified KMIP object hierarchy. A ManagedObject is a core KMIP object that is the subject of key management operations. It contains various attributes that are common to all types of ManagedObjects, including keys, certificates, and various types of secret or sensitive data. For more information, see Section 2.2 of the KMIP 1.1 specification. Attributes: value: The value of the ManagedObject. Type varies, usually bytes. unique_identifier: The string ID of the ManagedObject. names: A list of names associated with the ManagedObject. object_type: An enumeration associated with the type of ManagedObject.
6259906b1b99ca4002290141
class Show_Contacts: 
    """Main Pymol Plugin Class."""

    def __init__(self, app):
        parent = app.root
        self.parent = parent
        self.app = app
        # Dialog with two dropdowns for choosing the objects to compare.
        self.select_dialog = Pmw.Dialog(parent, buttons = ('Ok','Cancel'), title = 'Show Contacts Plugin', command = self.button_pressed )
        self.select_dialog.withdraw()
        self.select_object_combo_box = Pmw.ComboBox(self.select_dialog.interior(), scrolledlist_items=[], labelpos='w', label_text='Select loaded object:', listbox_height = 2, dropdown=True)
        self.select_object_combo_box2 = Pmw.ComboBox(self.select_dialog.interior(), scrolledlist_items=[], labelpos='w', label_text='Select loaded object:', listbox_height = 2, dropdown=True)
        self.select_object_combo_box.grid(column=1, row=0)
        self.select_object_combo_box2.grid(column=2, row=0)
        self.populate_ligand_select_list()
        self.select_dialog.show()

    def button_pressed(self, result):
        """Dialog callback: compute contacts on Ok, otherwise hide the dialog."""
        if hasattr(result,'keycode'):
            # Keyboard event; only keycode 36 is logged.
            if result.keycode == 36:
                print('keycode:', result.keycode)
        elif result == 'Ok' or result == 'Exit' or result == None:
            s1 = self.select_object_combo_box.get()
            s2 = self.select_object_combo_box2.get()
            show_contacts(s1,s2,'%s_%s'%(s1,s2))
            self.select_dialog.withdraw()
        # NOTE(review): the None case is already consumed by the branch above,
        # so only 'Cancel' can reach here.
        elif result == 'Cancel' or result == None:
            self.select_dialog.withdraw()

    def populate_ligand_select_list(self):
        """Fill both dropdowns with all loaded objects, excluding cluster objects."""
        loaded_objects = [ name for name in cmd.get_names('all') if '_cluster_' not in name ]
        self.select_object_combo_box.clear()
        self.select_object_combo_box2.clear()
        for ob in loaded_objects:
            self.select_object_combo_box.insert('end', ob)
            self.select_object_combo_box2.insert('end', ob)
Main Pymol Plugin Class
6259906b97e22403b383c724
class Episode:
    """Data class representing an Episode."""

    def __init__(self, tvshow: TVShow, title: str, season: int, number: int):
        self.tvshow = tvshow
        self.title = title
        self.season = season
        self.number = number

    def __eq__(self, other):
        return self.tvshow == other.tvshow and self.title == other.title and self.season == other.season and self.number == other.number

    def __str__(self):
        return f"Episode({self.tvshow}, {self.title}, {self.season}, " f"{self.number})"

    def __repr__(self):
        return str(self)

    # Matches a season-by-episode marker like '3x07' occupying a whole word.
    # Fix: raw string so '\d' and '\Z' are real regex escapes rather than
    # invalid string escapes (DeprecationWarning on modern Python).
    _episode_pattern = re.compile(r'\d+x\d+\Z')
    # Release tags that may trail the episode title and are stripped from it.
    _tags = {'PROPER', 'REPACK', 'TBA'}

    @staticmethod
    def from_title(title: str, tvshow_id: str):
        """Parse '<show name> <S>x<E> <episode title> [TAGS...]' into an Episode.

        :raises ParseError: if no 'SxE' marker is found in *title*.
        """
        words = title.split(" ")
        for index, word in enumerate(words):
            match = Episode._episode_pattern.match(word)
            if match:
                season, number = map(int, match.group().split('x'))
                tvshow_name = " ".join(words[:index])
                remainder = words[index + 1:]
                # Drop trailing release tags before joining the title.
                while remainder and remainder[-1] in Episode._tags:
                    remainder = remainder[:-1]
                episode_title = " ".join(remainder)
                return Episode(TVShow(tvshow_id, tvshow_name), episode_title, season, number)
        raise ParseError(f"failed to parse title '{title}'")
Data class representing an Episode
6259906bf7d966606f7494c7
class DeviceRegCompleteRequest(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    """

    def __init__(self, nonce=None):
        """DeviceRegCompleteRequest.

        :param nonce: opaque nonce string for completing device registration.
        """
        # Maps: attribute name -> swagger type, attribute name -> JSON key.
        self.swagger_types = {'nonce': 'str'}
        self.attribute_map = {'nonce': 'nonce'}
        self._nonce = nonce

    @property
    def nonce(self):
        """Gets the nonce of this DeviceRegCompleteRequest."""
        return self._nonce

    @nonce.setter
    def nonce(self, nonce):
        """Sets the nonce of this DeviceRegCompleteRequest."""
        self._nonce = nonce

    def to_dict(self):
        """Return the model's properties as a dict, recursing into nested models."""
        result = {}
        # Fix: iterate with plain dict iteration instead of the Python-2
        # `iteritems` shim; behavior is identical on Python 3.
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [x.to_dict() if hasattr(x, "to_dict") else x for x in value]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {k: v.to_dict() if hasattr(v, "to_dict") else v for k, v in value.items()}
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Return the string representation of the model."""
        return pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Return True if both objects are equal."""
        if not isinstance(other, DeviceRegCompleteRequest):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Return True if both objects are not equal."""
        return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
6259906b796e427e5384ff8f
class GtkReactorDeprecation(TestCase):
    """
    Tests to ensure all attributes of L{twisted.internet.gtkreactor} are
    deprecated.
    """

    # Minimal stand-ins for the gtk / pygtk modules, so importing
    # gtkreactor does not require a real GTK installation.
    class StubGTK:
        class GDK:
            INPUT_READ = None
        def input_add(self, *params):
            pass

    class StubPyGTK:
        def require(self, something):
            pass

    def setUp(self):
        # Snapshot sys.modules, then install the stubs.
        self.mods = sys.modules.copy()
        sys.modules['gtk'] = self.StubGTK()
        sys.modules['pygtk'] = self.StubPyGTK()

    def tearDown(self):
        # Restore the original module table.
        sys.modules.clear()
        sys.modules.update(self.mods)

    def lookForDeprecationWarning(self, testmethod, attributeName):
        # Exactly one DeprecationWarning with the expected message must have
        # been emitted while *testmethod* ran.
        warningsShown = self.flushWarnings([testmethod])
        self.assertEquals(len(warningsShown), 1)
        self.assertIdentical(warningsShown[0]['category'], DeprecationWarning)
        self.assertEquals(
            warningsShown[0]['message'],
            "twisted.internet.gtkreactor." + attributeName + " "
            "was deprecated in Twisted 10.1.0: All new applications should be "
            "written with gtk 2.x, which is supported by "
            "twisted.internet.gtk2reactor.")

    def test_gtkReactor(self):
        from reqs.twisted.internet import gtkreactor
        gtkreactor.GtkReactor();
        self.lookForDeprecationWarning(self.test_gtkReactor, "GtkReactor")

    def test_portableGtkReactor(self):
        from reqs.twisted.internet import gtkreactor
        gtkreactor.PortableGtkReactor()
        self.lookForDeprecationWarning(self.test_portableGtkReactor, "PortableGtkReactor")

    def test_install(self):
        from reqs.twisted.internet import gtkreactor
        # install() asserts without a running reactor, but still warns.
        self.assertRaises(AssertionError, gtkreactor.install)
        self.lookForDeprecationWarning(self.test_install, "install")

    def test_portableInstall(self):
        from reqs.twisted.internet import gtkreactor
        self.assertRaises(AssertionError, gtkreactor.portableInstall)
        self.lookForDeprecationWarning(self.test_portableInstall, "portableInstall")
Tests to ensure all attributes of L{twisted.internet.gtkreactor} are deprecated.
6259906bcc0a2c111447c6dc
class MultiVersionGenerateOrchestrator:
    """An orchestrator for generating multiversion tasks."""

    @inject.autoparams()
    def __init__(self, evg_api: EvergreenApi, multiversion_util: MultiversionUtilService, gen_task_options: GenTaskOptions) -> None:
        """
        Initialize the orchestrator.

        :param evg_api: Evergreen API client.
        :param multiversion_util: Utility service for multiversion suites.
        :param gen_task_options: Options controlling task generation.
        """
        self.evg_api = evg_api
        self.multiversion_util = multiversion_util
        self.gen_task_options = gen_task_options

    def generate_fuzzer(self, evg_expansions: EvgExpansions) -> GeneratedConfiguration:
        """Build the configuration for a multiversion fuzzer task."""
        suite = evg_expansions.suite
        is_sharded = self.multiversion_util.is_suite_sharded(suite)
        version_config_list = get_version_configs(is_sharded)
        builder = EvgConfigBuilder()
        # Generate one fuzzer per version configuration, collecting sub-tasks.
        fuzzer_task_set = set()
        for version_config in version_config_list:
            fuzzer_params = evg_expansions.get_fuzzer_params(version_config, is_sharded)
            fuzzer_task = builder.generate_fuzzer(fuzzer_params)
            fuzzer_task_set = fuzzer_task_set.union(fuzzer_task.sub_tasks)
        existing_tasks = {ExistingTask(task) for task in fuzzer_task_set}
        # BUG FIX: the original called existing_tasks.add({ExistingTask(...)}),
        # which tries to add an (unhashable) set into a set and raises
        # TypeError; add the ExistingTask itself.
        existing_tasks.add(ExistingTask(f"{suite}_multiversion_gen"))
        builder.add_display_task(evg_expansions.task, existing_tasks, evg_expansions.build_variant)
        return builder.build(f"{evg_expansions.task}.json")

    def generate_resmoke_suite(self, evg_expansions: EvgExpansions) -> GeneratedConfiguration:
        """Build the configuration for a multiversion resmoke suite."""
        suite = evg_expansions.suite or evg_expansions.task
        is_sharded = self.multiversion_util.is_suite_sharded(suite)
        split_params = evg_expansions.get_split_params()
        gen_params = evg_expansions.get_generation_params(is_sharded)
        builder = EvgConfigBuilder()
        builder.add_multiversion_suite(split_params, gen_params)
        builder.add_display_task(GEN_PARENT_TASK, {f"{split_params.task_name}"}, evg_expansions.build_variant)
        return builder.build(f"{evg_expansions.task}.json")

    def generate(self, evg_expansions: EvgExpansions) -> None:
        """Generate the requested configuration and write it to disk."""
        if evg_expansions.is_jstestfuzz:
            generated_config = self.generate_fuzzer(evg_expansions)
        else:
            generated_config = self.generate_resmoke_suite(evg_expansions)
        generated_config.write_all_to_dir(DEFAULT_CONFIG_DIR)
An orchestrator for generating multiversion tasks.
6259906b7047854f46340bce
class Node(object):
    """A doubly-linked-list node: a value plus optional neighbor links."""

    def __init__(self, val, next_node=None, prev_node=None):
        # Stored payload.
        self.val = val
        # Forward / backward references; None at either end of the list.
        self.next_node = next_node
        self.prev_node = prev_node
Set properties and methods of Node class.
6259906b67a9b606de5476ae
class PeerRelationEvents(ObjectEvents):
    """Peer Relation Events."""
Peer Relation Events
6259906b442bda511e95d964
class Config(DotSection):
    """Validation and encapsulation for the DXR config file.

    Examples::

        # Settings from the [DXR] section:
        >>> Config(...).default_tree

        # Settings from individual trees:
        >>> Config(...).trees['some-tree'].build_command

        # Settings from plugin-specific sections of trees:
        >>> Config(...).trees['some-tree'].buglink.url
    """

    def __init__(self, input, relative_to=None):
        # Schema for the [DXR] section; every other top-level section is a
        # tree and is validated separately by TreeConfig below.
        schema = Schema({
            'DXR': {
                Optional('temp_folder', default=abspath('dxr-temp-{tree}')): AbsPath,
                Optional('default_tree', default=None): basestring,
                Optional('disabled_plugins', default=plugin_list('')): Plugins,
                Optional('enabled_plugins', default=plugin_list('*')): Plugins,
                Optional('generated_date', default=datetime.utcnow() .strftime("%a, %d %b %Y %H:%M:%S +0000")): basestring,
                Optional('log_folder', default=abspath('dxr-logs-{tree}')): AbsPath,
                Optional('workers', default=if_raises(NotImplementedError, cpu_count, 1)): And(Use(int), lambda v: v >= 0, error='"workers" must be a non-negative integer.'),
                Optional('skip_stages', default=[]): WhitespaceList,
                Optional('www_root', default=''): Use(lambda v: v.rstrip('/')),
                Optional('google_analytics_key', default=''): basestring,
                Optional('es_hosts', default='http://127.0.0.1:9200/'): WhitespaceList,
                Optional('es_index', default='dxr_{format}_{tree}_{unique}'): basestring,
                Optional('es_alias', default='dxr_{format}_{tree}'): basestring,
                Optional('es_catalog_index', default='dxr_catalog'): basestring,
                Optional('es_catalog_replicas', default=1): Use(int, error='"es_catalog_replicas" must be an integer.'),
                Optional('max_thumbnail_size', default=20000): And(Use(int), lambda v: v >= 0, error='"max_thumbnail_size" must be a non-negative ' 'integer.'),
                Optional('es_indexing_timeout', default=60): And(Use(int), lambda v: v >= 0, error='"es_indexing_timeout" must be a non-negative ' 'integer.'),
                Optional('es_refresh_interval', default=60): Use(int, error='"es_indexing_timeout" must be an integer.')
            },
            basestring: dict
        })
        # *input* may be the raw config text or an already-split iterable.
        config_obj = ConfigObj(input.splitlines() if isinstance(input, basestring) else input, list_values=False)
        if not relative_to:
            relative_to = getcwd()
        # Relative paths in the config are resolved against *relative_to*.
        with cd(relative_to):
            try:
                config = schema.validate(config_obj.dict())
            except SchemaError as exc:
                raise ConfigError(exc.code, ['DXR'])
            self._section = config['DXR']
            # Expand the '*' wildcard into all non-core plugins that are not
            # explicitly disabled.
            if self.enabled_plugins.is_all:
                self._section['enabled_plugins'] = [ p for p in all_plugins_but_core().values() if p not in self.disabled_plugins]
            self.trees = OrderedDict()
            for section in config_obj.sections:
                if section != 'DXR':
                    try:
                        self.trees[section] = TreeConfig(section, config[section], config_obj[section].sections, self)
                    except SchemaError as exc:
                        raise ConfigError(exc.code, [section])
        # Default to the first tree when no default was given explicitly.
        if not self.default_tree:
            self._section['default_tree'] = first(self.trees.iterkeys())
        # The plugin lists have been folded into the trees; drop them so they
        # are not exposed as plain settings.
        del self._section['enabled_plugins']
        del self._section['disabled_plugins']
Validation and encapsulation for the DXR config file Examples:: # Settings from the [DXR] section: >>> Config(...).default_tree # Settings from individual trees: >>> Config(...).trees['some-tree'].build_command # Settings from plugin-specific sections of trees: >>> Config(...).trees['some-tree'].buglink.url
6259906be1aae11d1e7cf419
class AliasViewSet(ViewSetWithPermissions):
    """API endpoint that allows alias to be viewed or edited."""

    # Prefetch the 'replace' relation to avoid per-row queries.
    queryset = Alias.objects.prefetch_related('replace')
    serializer_class = AliasSerializer
API endpoint that allows alias to be viewed or edited.
6259906b3cc13d1c6d466f5e
class CategoryModel(db.Model):
    """SQLAlchemy model for a post category.

    Provides lookup helpers (by name / by id) and simple persistence
    helpers for creating, editing and deleting categories.
    """
    __tablename__ = 'categories'

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(80))
    posts = db.relationship('PostModel', lazy='dynamic')

    def __init__(self, id, name):
        self.id = id
        self.name = name

    def json(self):
        """Return a JSON-serializable representation of the category."""
        return {"id": self.id, "name": self.name}

    @classmethod
    def find_by_name(cls, name):
        """Return the first category with the given name, or None."""
        return cls.query.filter_by(name=name).first()

    @classmethod
    def find_by_id(cls, id):
        """Return the category with the given id, or None."""
        return cls.query.filter_by(id=id).first()

    def save_to_db(self):
        """Insert or update this category in the database."""
        db.session.add(self)
        db.session.commit()

    def delete_from_db(self):
        """Delete this category from the database."""
        db.session.delete(self)
        db.session.commit()
This class contains all the functions needed to create, edit, and delete a category. Args: id: int name: string
6259906bdd821e528d6da58d
class JobTemplate(models.Model):
    """A reusable job template.

    ``body`` wraps the script content of a job; saving a template
    re-saves all jobs derived from it so their rendered output stays in
    sync with the template.
    """
    title = models.CharField(max_length=255, db_index=True)
    description = models.TextField(blank=True)
    # Template text; a job's script is injected where {{ content|safe }}
    # appears.
    body = models.TextField(help_text=(
        'Use {{ content|safe }} at the place where you want to render the '
        'script content of the job'
    ))
    project = models.ForeignKey(Project)
    # Newline-separated list; merged with the project's addresses by
    # get_notification_addresses().
    notification_addresses = models.TextField(
        help_text='Separate e-mail addresses by a newline',
        blank=True,
    )
    enqueue_is_enabled = models.BooleanField(
        default=True,
        db_index=True,
        help_text=(
            'If unchecked, nothing for this template will be added to '
            'the worker queue. This will not affect already running jobs.'
        )
    )

    def __unicode__(self):
        return u'{0} > {1}'.format(self.project, self.title)

    def save(self, *args, **kwargs):
        """Save the template, then re-save every job based on it."""
        super(JobTemplate, self).save(*args, **kwargs)
        # Propagate template changes to all dependent jobs.
        for job in self.job_set.all():
            job.save()

    def get_notification_addresses(self):
        """Return this template's notification addresses plus the project's.

        Blank lines and surrounding whitespace are stripped.
        """
        addresses = self.notification_addresses.strip().split('\n')
        addresses = [x.strip() for x in addresses if x.strip() != '']
        addresses.extend(self.project.get_notification_addresses())
        return addresses

    class Meta:
        ordering = ('project__title', 'title', )
Job templates
6259906bd486a94d0ba2d7d8
class AppSettings(BaseSettings):
    """All settings the server needs.

    The log directory is chosen at import time: /var/log when running as
    root, /tmp otherwise (and on platforms without os.getuid()).
    """
    # os.getuid() does not exist on Windows, hence the broad guard.
    try:
        log_path = '/var/log' if os.getuid() == 0 else '/tmp'
    except Exception:
        log_path = '/tmp'
    # exist_ok avoids the check-then-create race the old
    # path.exists() + makedirs() sequence had.
    os.makedirs(log_path, exist_ok=True)
    log_file_path = path.join(log_path, 'data-cap-privacy.log')
    log_level: str = "DEBUG"

    class Config(BaseSettings.Config):
        env_prefix: str = ''
        case_insensitive = True
        allow_mutation: bool = False
This class represents all settings the server would need.
6259906b5fdd1c0f98e5f79e
class ICollection(IMinMaxLen, IIterable, IContainer):
    """Abstract interface containing a collection value.

    The value must be iterable and may have a min_length/max_length.
    """
    # Schema field describing the type every member must conform to.
    value_type = Field(
        title = _("Value Type"),
        description = _(u"Field value items must conform to the given type, "
                        u"expressed via a Field."))
    # Whether duplicate members are disallowed.
    unique = Bool(
        title = _('Unique Members'),
        description = _('Specifies whether the members of the collection '
                        'must be unique.'),
        default=False)
Abstract interface containing a collection value. The Value must be iterable and may have a min_length/max_length.
6259906b460517430c432c62
class MessageQueueTest(TestCase):
    """Test MessageQueue.

    Requires permission to listen on port 9999 on 127.0.0.1.
    """

    def setUp(self):
        address = "127.0.0.1"
        port = "9999"
        self.message_queue = MessageQueue(
            address,
            port
        )
        self.message_queue.start()

        class TestClient:
            # Minimal ZeroMQ REQ client used to drive the queue under test.
            def __init__(self):
                self.context = zmq.Context()
                self.socket = self.context.socket(zmq.REQ)
                self.socket.connect(f"tcp://{address}:{port}")

            def send(self, content: bytes):
                self.socket.send(content)

            def recv(self) -> bytes:
                return self.socket.recv()

        self.client = TestClient()

    def test_message_queue(self):
        # Round trip 1: client -> queue, then queue replies.
        self.client.send(b'[]')
        socket_id1, init_message = self.message_queue.recv()
        self.assertEqual(init_message, b'[]')
        self.message_queue.send(
            socket_id1,
            b''
        )
        judge_message = self.client.recv()
        self.assertEqual(judge_message, b'')
        # Round trip 2: the same client must be reported with the same
        # socket id.
        self.client.send(b'[-1]')
        socket_id2, result_message = self.message_queue.recv()
        self.assertEqual(socket_id1, socket_id2)
        self.assertEqual(result_message, b'[-1]')
Test Message Queue. Testing MessageQueue needs permission to listen port 9999 on 127.0.0.1.
6259906b55399d3f05627d3a
class InsertMoleculeMove(GCMove):
    """Concrete class for Grand Canonical molecule moves."""
    # Keyword identifying this mover.
    key = "gcinsertmol"
    # Parsing / printing callbacks for molecule-based GCMC moves.
    parse_mover = staticmethod(parse_molecule_gcmc)
    print_mover = staticmethod(print_gcmc)
Concrete class for Grand Canonical molecule moves
6259906b4c3428357761bacc
class TranslateError(Exception):
    """Raised when key_to_hex() cannot translate a key."""

    def __init__(self, _file, line, msg):
        super(TranslateError, self).__init__()
        self._file = _file
        self.line = line
        self.msg = msg

    def __str__(self):
        # "arg" is the sentinel used when the key came from a
        # send_keystrokes() argument rather than from a file.
        if self._file == "arg":
            return "Error in send_keystrokes() argument: " + self.msg
        return ("Error in file " + os.path.abspath(self._file) +
                " on line " + str(self.line) + ": " + self.msg)
Error raised when key_to_hex() cannot translate a key
6259906b8a43f66fc4bf39ac
class EnvioModulo(models.Model):
    """Model for tracking package deliveries to the MAC modules.

    (Original: "Modelo para controlar las entregas a los MAC".)
    """
    # (stored value, display value) choices for the destination module.
    MODULO = (
        ('151', '290151'),
        ('152', '290152'),
        ('153', '290153'),
        ('154', '290154'),
        ('251', '290251'),
        ('252', '290252'),
        ('253', '290253'),
        ('254', '290254'),
        ('291', '290291'),
        ('351', '290351'),
        ('352', '290352'),
        ('353', '290353'),
    )
    lote = models.ForeignKey(Envio, on_delete=models.CASCADE)
    mac = models.CharField(max_length=3, choices=MODULO)
    paquetes = models.PositiveSmallIntegerField()
    formatos = models.IntegerField()
    recibido_mac = models.DateTimeField()
    disponible_mac = models.DateTimeField()
    # Derived fields: recomputed in save(), never edited directly.
    transito = models.DurationField(editable=False)
    tran_sec = models.FloatField(editable=False)

    def __str__(self):
        return '290%s - %s' % (self.mac, self.lote)

    @property
    def _get_remesa(self):
        """Return the Remesa whose date range contains this lot's
        cut-off date, or None if there is none."""
        try:
            for rem in Remesa.objects.all():
                if rem.inicio <= self.lote.fecha_corte <= rem.fin:
                    return rem
        except Remesa.DoesNotExist:
            return None

    def save(self, *args, **kwargs):
        """Recompute transit duration (and its seconds) before saving."""
        self.transito = self.disponible_mac - self.lote.recibido_vrd
        self.tran_sec = self.transito.total_seconds()
        super(EnvioModulo, self).save(*args, **kwargs)

    class Meta:
        verbose_name = "Paquete"
        verbose_name_plural = "Paquetes a MAC"
        unique_together = ('lote', 'mac')
Modelo para controlar las entregas a los MAC
6259906ba219f33f346c8021
class SSLUseCommand(Command):
    """Check whether the app appears to set up SSL.

    PRECONDITION: the app must already have been checked for internet
    use with InternetUseCommand.
    """

    def __init__(self, app):
        self.app = app

    def execute(self):
        """Scan the app's sources for SSL setup; True if found often enough."""
        logging.info("Running SSL USE command")
        hits = 0
        flagged_files = []
        for source_path in self.app.source_paths:
            with open(source_path, 'r') as source:
                try:
                    for line in source.readlines():
                        # Either API class indicates SSL socket setup.
                        if "SSLSocketFactory" in line or "SSLSession" in line:
                            flagged_files.append(source_path)
                            hits += 1
                            logging.info("found %d instance of SSL setup in %s",
                                         hits, basename(source_path))
                except UnicodeDecodeError:
                    # Binary or oddly-encoded file; skip it.
                    logging.warning("Unicode error: skipping %s",
                                    basename(source_path))
        # More than two matches counts as confident SSL use.
        if hits > 2:
            return True
        logging.info("Found no SSL instance use.")
        return False
Command that will check if app is using SSL! PRECONDITION, must have checked if app uses internet using the InternetUseCommand
6259906b8e7ae83300eea8a8
class PrepaymentBalanceListResponse(object):
    """NOTE: This class is auto generated by the swagger code generator
    program.

    Do not edit the class manually.
    """

    def __init__(self, data=None, pagination=None):
        # attribute name -> swagger type, consumed by to_dict()
        self.swagger_types = {
            'data': 'list[PrepaymentBalance]',
            'pagination': 'Pagination'
        }
        # attribute name -> JSON key
        self.attribute_map = {
            'data': 'data',
            'pagination': 'pagination'
        }
        self._data = data
        self._pagination = pagination

    @property
    def data(self):
        return self._data

    @data.setter
    def data(self, data):
        self._data = data

    @property
    def pagination(self):
        return self._pagination

    @pagination.setter
    def pagination(self, pagination):
        self._pagination = pagination

    def to_dict(self):
        """Return the model's properties as a dict, recursing into nested
        models, lists and dicts."""
        result = {}
        for attr, _ in iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Return the pretty-printed string representation of the model."""
        return pformat(self.to_dict())

    def __repr__(self):
        return self.to_str()

    def __eq__(self, other):
        # NOTE(review): assumes `other` has a __dict__; comparing against
        # unrelated types may raise — standard for this generator's output.
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
6259906b7047854f46340bcf
class TestCrc16(TestClass):
    """Unit tests for the CRC16 helper."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

    def test_calc_crc(self):
        # None input raises AttributeError.
        with self.assertRaises(AttributeError):
            CRC16.calc_crc(None)
        # str input is rejected; only bytes-like input is valid.
        with self.assertRaises(TypeError):
            CRC16.calc_crc("string_input")
        crc = CRC16.calc_crc(b'\x01')
        self.assertEqual(crc, 0xF1D1)
        crc = CRC16.calc_crc(b'\x01\x02')
        self.assertEqual(crc, 0x0E7C)
        # Chaining byte-by-byte (passing the previous crc back in) must
        # match a single two-byte call.
        crc = CRC16.calc_crc(b'\x01')
        crc = CRC16.calc_crc(b'\x02', crc)
        self.assertEqual(crc, 0x0E7C)
        crc = CRC16.calc_crc(b'\x01\x02')
        crc = CRC16.calc_crc(b'\x01\x02', crc)
        self.assertEqual(crc, 0x8F67)
Test class for main file.
6259906b4a966d76dd5f0704
class TitoGitTestFixture(unittest.TestCase):
    """Fixture providing setup/teardown and utilities for all tests
    requiring an actual git repository.
    """

    def setUp(self):
        # Fresh throwaway git repository per test.
        self.repo_dir = tempfile.mkdtemp("-titotest")
        print
        print
        print("Testing in: %s" % self.repo_dir)
        print
        self.repo = git.Repo.init(path=self.repo_dir, mkdir=True, bare=False)
        os.chdir(self.repo_dir)
        tito("init")
        # Force offline mode so tests never hit the network.
        run_command('echo "offline = true" >> rel-eng/tito.props')
        index = self.repo.index
        index.add(['rel-eng/tito.props'])
        index.commit('Setting offline.')

    def create_project(self, pkg_name, pkg_dir=''):
        """Create a dummy project (spec, setup.py, source) under pkg_dir,
        commit it, and create the initial tito tag.

        pkg_dir is relative to the repository root; '' means the root.
        """
        full_pkg_dir = os.path.join(self.repo_dir, pkg_dir)
        run_command('mkdir -p %s' % full_pkg_dir)
        os.chdir(full_pkg_dir)
        # Arbitrary content file.
        filename = os.path.join(full_pkg_dir, "a.txt")
        out_f = open(filename, 'w')
        out_f.write("BLERG\n")
        out_f.close()
        # RPM spec for the package.
        filename = os.path.join(full_pkg_dir, "%s.spec" % pkg_name)
        out_f = open(filename, 'w')
        out_f.write("Name: %s" % pkg_name)
        out_f.write(TEST_SPEC)
        out_f.close()
        filename = os.path.join(full_pkg_dir, "setup.py")
        out_f = open(filename, 'w')
        out_f.write(TEST_SETUP_PY % (pkg_name, pkg_name))
        out_f.close()
        run_command('mkdir -p %s' % os.path.join(full_pkg_dir, "src"))
        filename = os.path.join(full_pkg_dir, "src", "module.py")
        out_f = open(filename, 'w')
        out_f.write(TEST_PYTHON_SRC)
        out_f.close()
        # Commit everything and tag.
        index = self.repo.index
        files = [os.path.join(pkg_dir, 'a.txt'),
                 os.path.join(pkg_dir, 'setup.py'),
                 os.path.join(pkg_dir, '%s.spec' % pkg_name),
                 os.path.join(pkg_dir, 'src/module.py')
                 ]
        index.add(files)
        index.commit('Initial commit.')
        tito('tag --keep-version --debug --accept-auto-changelog')
Fixture providing setup/teardown and utilities for all tests requiring an actual git repository.
6259906b0a50d4780f7069cd
class MovieReviewDataset():
    """Movie-review dataset yielding (review, rating, sentiment) tuples.

    Reviews are preprocessed to ``max_length``; ratings are float scores;
    ``sentiment`` is a 3-way one-hot class (negative / neutral / positive).
    """

    def __init__(self, dataset_path: str, max_length: int):
        review_path = os.path.join(dataset_path, 'train', 'train_data')
        label_path = os.path.join(dataset_path, 'train', 'train_label')
        with open(review_path, 'rt', encoding='utf-8') as f:
            self.reviews = preprocess(f.readlines(), max_length)
        with open(label_path) as f:
            self.labels = np.array([np.float32(line) for line in f.readlines()])

        def one_hot(score):
            # score <= 4 -> negative, <= 7 -> neutral, else positive
            if score <= 4:
                return [1., 0., 0.]
            if score <= 7:
                return [0., 1., 0.]
            return [0., 0., 1.]

        self.sentiment = np.array([one_hot(label) for label in self.labels])

    def __len__(self):
        return len(self.reviews)

    def __getitem__(self, idx):
        return self.reviews[idx], self.labels[idx], self.sentiment[idx]

    def shuffle(self):
        """Shuffle in place, keeping the three arrays aligned."""
        order = np.random.permutation(len(self.reviews))
        self.reviews = self.reviews[order]
        self.labels = self.labels[order]
        self.sentiment = self.sentiment[order]
영화리뷰 데이터를 읽어서, tuple (데이터, 레이블)의 형태로 리턴하는 파이썬 오브젝트 입니다.
6259906b7047854f46340bd0
class NasConv(nn.Layer):
    """NASNet specific convolution block (ReLU -> Conv -> BatchNorm).

    Parameters:
    ----------
    in_channels : int
        Number of input channels.
    out_channels : int
        Number of output channels.
    kernel_size : int or tuple/list of 2 int
        Convolution window size.
    strides : int or tuple/list of 2 int
        Strides of the convolution.
    padding : int or tuple/list of 2 int
        Padding value for convolution layer.
    groups : int
        Number of groups.
    data_format : str, default 'channels_last'
        The ordering of the dimensions in tensors.
    """
    def __init__(self,
                 in_channels,
                 out_channels,
                 kernel_size,
                 strides,
                 padding,
                 groups,
                 data_format="channels_last",
                 **kwargs):
        super(NasConv, self).__init__(**kwargs)
        self.activ = nn.ReLU()
        # Bias is omitted because batch norm follows.
        self.conv = Conv2d(
            in_channels=in_channels,
            out_channels=out_channels,
            kernel_size=kernel_size,
            strides=strides,
            padding=padding,
            groups=groups,
            use_bias=False,
            data_format=data_format,
            name="conv")
        self.bn = nasnet_batch_norm(
            channels=out_channels,
            data_format=data_format,
            name="bn")

    def call(self, x, training=None):
        # Pre-activation ordering: ReLU is applied before the convolution.
        x = self.activ(x)
        x = self.conv(x)
        x = self.bn(x, training=training)
        return x
NASNet specific convolution block. Parameters: ---------- in_channels : int Number of input channels. out_channels : int Number of output channels. kernel_size : int or tuple/list of 2 int Convolution window size. strides : int or tuple/list of 2 int Strides of the convolution. padding : int or tuple/list of 2 int Padding value for convolution layer. groups : int Number of groups. data_format : str, default 'channels_last' The ordering of the dimensions in tensors.
6259906bd486a94d0ba2d7d9
class Mapcycle:
    """Convenience class for map cycles.

    Wraps a cycle identifier (e.g. ``"2015-06-01"``) and exposes it as a
    year, a pandas Timestamp, and a date.
    """

    def __init__(self, cycle):
        # Normalize to a string so non-string inputs (e.g. ints) work too.
        self.value = str(cycle)

    @property
    def year(self):
        """Four-digit year parsed from the start of the cycle string."""
        return int(self.value[:4])

    @property
    def timestamp(self):
        """The cycle parsed as a ``pandas.Timestamp``."""
        return pd.Timestamp(self.value)

    @property
    def date(self):
        """The cycle as a ``datetime.date``."""
        return pd.Timestamp(self.value).date()
Convenience class for map cycles
6259906b283ffb24f3cf50c3
class RequestTimeOut(TimeOut):
    """Raised when a PrePool request times out."""
    # Human-readable message for this timeout variant.
    message = "PrePool Request timeout"
required time out
6259906b4e4d562566373c20
class TestGuessManifest(TestCase):
    """Tests for timestream.parse.ts_guess_manifest"""
    _multiprocess_can_split_ = True
    maxDiff = None

    # Manifest expected to be inferred from the fixture timestream.
    expect_good = {
        "name": "BVZ0022-GC05L-CN650D-Cam07~fullres-orig",
        "start_datetime": "2013_10_30_03_00_00",
        "end_datetime": "2013_10_30_06_00_00",
        "version": 1,
        "image_type": "jpg",
        "extension": "JPG",
        "interval": 30,
        "missing": [],
    }

    def test_good_ts(self):
        # A well-formed timestream yields exactly the expected manifest.
        got = ts_guess_manifest(helpers.FILES["timestream_manifold"])
        self.assertTrue(isinstance(got, dict))
        self.assertDictEqual(got, self.expect_good)

    def test_trailing_slash(self):
        # A trailing path separator must not change the result.
        got = ts_guess_manifest(helpers.FILES["timestream_manifold"] + os.sep)
        self.assertTrue(isinstance(got, dict))
        self.assertDictEqual(got, self.expect_good)
Tests for timestream.parse.ts_guess_manifest
6259906b3346ee7daa33826b
class SentimentAdapter(BaseMatchAdapter):
    """Selects a response with the closest matching sentiment value to
    the input statement.
    """

    def __init__(self, **kwargs):
        super(SentimentAdapter, self).__init__(**kwargs)
        # Imported lazily so the dependency is only paid when this
        # adapter is actually used.
        from chatterbot.conversation.comparisons import sentiment_comparison
        # Callers may override the comparison function via kwargs.
        self.compare_statements = kwargs.get(
            'statement_comparison_function',
            sentiment_comparison
        )
This adapter selects a response with the closest matching sentiment value to the input statement.
6259906ba8370b77170f1be0
class ActionType:
    """Namespace of integer constants for the possible action types."""
    PREPARE_FLANKING_LEFT = 1
    PREPARE_FLANKING_RIGHT = 2
    ATTACK_BASE = 3
List of possible action types.
6259906b6e29344779b01e6e
class ResultsValues(enum.Enum):
    """Values for results for Checksum reports."""
    # Key for the source file name.
    SOURCE_FILE = "source_filename"
    # Key for the checksum hash of the source.
    SOURCE_HASH = "checksum_hash"
    # Key for the checksum file.
    CHECKSUM_FILE = "checksum_file"
Values for results for Checksum reports.
6259906b3cc13d1c6d466f60
class LaunchedDroneRegister(SlotAmountRegister):
    """Implements restriction: the number of launched drones should not
    exceed the number of drones you're allowed to launch.

    Details:
    Only holders of Drone class are tracked.
    For validation, stats module data is used.
    """

    def __init__(self, fit):
        SlotAmountRegister.__init__(self, fit, 'launched_drones',
                                    Restriction.launched_drone)

    def register_holder(self, holder):
        # Only drones consume launched-drone slots.
        if isinstance(holder, Drone):
            SlotAmountRegister.register_holder(self, holder)

    def _get_tainted_holders(self, slots_max):
        return self._slot_consumers
Implements restriction: Number of launched drones should not exceed number of drones you're allowed to launch. Details: Only holders of Drone class are tracked. For validation, stats module data is used.
6259906b2c8b7c6e89bd5000
class PhaseGate(OneQubitGate):
    """The single qubit phase, or S, gate.

    This gate rotates the phase of the state by pi/2 if the state is
    ``|1>`` and does nothing if the state is ``|0>``.

    Parameters
    ----------
    target : int
        The target qubit this gate will apply to.
    """
    gate_name = 'S'
    gate_name_latex = 'S'

    def get_target_matrix(self, format='sympy'):
        # Fetch the cached 2x2 S matrix in the requested representation.
        return matrix_cache.get_matrix('S', format)

    def _eval_commutator_ZGate(self, other, **hints):
        # S and Z are both diagonal, so their commutator vanishes.
        return _S.Zero

    def _eval_commutator_TGate(self, other, **hints):
        return _S.Zero
The single qubit phase, or S, gate. This gate rotates the phase of the state by pi/2 if the state is ``|1>`` and does nothing if the state is ``|0>``. Parameters ---------- target : int The target qubit this gate will apply to. Examples ========
6259906b0c0af96317c5796c
class ZoneViewsRemoveResourcesRequest(messages.Message):
    """The request to remove resources from the resource view.

    Fields:
      resources: The list of resources to be removed.
    """
    resources = messages.StringField(1, repeated=True)
The request to remove resources from the resource view. Fields: resources: The list of resources to be removed.
6259906ba8370b77170f1be1
class TestMeetingInfoGet(unittest.TestCase):
    """MeetingInfoGet unit test stubs"""

    def setUp(self):
        # No fixtures needed yet.
        pass

    def tearDown(self):
        # Nothing to clean up.
        pass

    def testMeetingInfoGet(self):
        # TODO: implement once MeetingInfoGet behavior is defined.
        pass
MeetingInfoGet unit test stubs
6259906b3d592f4c4edbc6fa
class CourseNavigationTestMixin(object):
    """A Mixin class for testing all views related to Course navigation."""
    __metaclass__ = ABCMeta

    def test_depth_zero(self):
        # navigation_depth=0 must still return the root with its
        # immediate descendants.
        response = self.http_get_for_course(
            data={'navigation_depth': '0'}
        )
        root_block = response.data[self.block_navigation_view_type][unicode(self.course.location)]
        self.assertIn('descendants', root_block)
        self.assertEquals(len(root_block['descendants']), 4)

    def test_depth(self):
        response = self.http_get_for_course()
        # (container location, expected number of descendants) pairs.
        container_descendants = (
            (self.course.location, 1),
            (self.sequential.location, 3),
        )
        for container_location, expected_num_descendants in container_descendants:
            block = response.data[self.block_navigation_view_type][unicode(container_location)]
            self.assertIn('descendants', block)
            self.assertEquals(len(block['descendants']), expected_num_descendants)
A Mixin class for testing all views related to Course navigation.
6259906c2c8b7c6e89bd5001
class RemoteEvent(Event):
    """An event that was published by another module.

    We know nothing about this locally; we rely on the other module to
    continuously update us with progress information as it emerges.
    """

    def __init__(self, my_id, message, refs, duration="(since 00h 00m 00s)"):
        super(RemoteEvent, self).__init__(message, refs, duration)
        # Identifier assigned by the publishing module.
        self.id = my_id
        # Fraction complete in [0, 1], updated via set_progress().
        self._progress = 0.0
        # _refresh() is provided by the Event base class.
        self._refresh()

    def set_progress(self, progress):
        """Record the latest progress reported by the remote module."""
        self._progress = progress
        self._refresh()

    @property
    def progress(self):
        return self._progress
An event that was published by another module: we know nothing about this, rely on the other module to continuously update us with progress information as it emerges.
6259906c76e4537e8c3f0d9e
class Random:
    """Commands that generate things at random."""

    # Flip a coin and send the result.
    @commands.command(aliases=["cflip", "coinflip"])
    @commands.cooldown(6, 12, commands.BucketType.channel)
    async def coin(self, ctx):
        choice = systemrandom.choice(SIDES_COIN)
        logger.info(f"Flipped a coin; it's {choice}")
        await ctx.send(choice)

    # Send a random integer between start and end (inclusive).
    @commands.command(aliases=["randint"])
    @commands.cooldown(6, 12, commands.BucketType.channel)
    async def rng(self, ctx, start:int=1, end:int=100):
        # Tolerate reversed bounds instead of erroring.
        if start > end:
            start, end = end, start
        number = systemrandom.randint(start, end)
        message = f"{number} (random number from {start} to {end})"
        logger.info(message)
        await ctx.send(message)

    # Fetch and send a random word from an external API.
    @commands.command(aliases=["rword", "randword"])
    @commands.cooldown(6, 12, commands.BucketType.channel)
    async def rwg(self, ctx):
        async with ctx.bot.session.get(URL_RANDOM_WORD_API) as response:
            if response.status == 200:
                word = await response.text()
                await ctx.send(word)
            else:
                message = "Could not reach API. x.x"
                await ctx.send(message)

    # Roll dice written in D&D notation, e.g. `5d6`.
    @commands.command()
    @commands.cooldown(6, 12, commands.BucketType.channel)
    async def roll(self, ctx, *expressions):
        rolls = []
        paginator = commands.Paginator()
        counter = 0
        for expression in expressions:
            # Stop once the per-command roll budget is exhausted.
            if counter >= MAX_ROLL_COUNT:
                break
            elif REGEX_OBJECT_DND.fullmatch(expression):
                expression_parts = re.split(REGEX_DND_SPLIT, expression)
                roll = [int(value) for value in expression_parts]
                # Enforce per-roll dice-count and die-size limits.
                if roll[0] > MAX_DICE_PER_ROLL or roll[1] > MAX_DIE_SIZE:
                    continue
                elif roll[1] > 1 and roll[0] >= 1:
                    outcomes = []
                    for times in range(0, roll[0]):
                        outcome = systemrandom.randint(1, roll[1])
                        outcomes.append(outcome)
                    outcomes_string = ", ".join((str(value) for value in outcomes))
                    rolls.append(f"{expression}: {outcomes_string} ({sum(outcomes)})")
                    counter += 1
        if len(rolls) > 0:
            for roll in rolls:
                paginator.add_line(roll)
            for page in paginator.pages:
                await ctx.send(page)
        else:
            raise commands.UserInputError(("No valid rolls supplied. "
                                           f"Please use D&D format, e.g. `5d6`.\n"
                                           "Individual rolls cannot have more than "
                                           f"`{MAX_DICE_PER_ROLL}` dice, and dice cannot have "
                                           f"more than `{MAX_DIE_SIZE}` sides."))
Commands that generate things at random.
6259906ca17c0f6771d5d7b6
class RegisterView(View):
    """Registration view (注册视图)."""

    def get(self, request):
        """Render the empty registration form."""
        return render(request, 'user/reg.html')

    def post(self, request):
        """Validate the submitted form and create the user."""
        form = RegisterModelForm(request.POST)
        if not form.is_valid():
            # Re-render the form with validation errors.
            return render(request, 'user/reg.html', context={'form': form})
        cleaned = form.cleaned_data
        user = Users()
        user.phone = cleaned.get('phone')
        # Store the hashed password, never the raw one.
        user.password = set_password(cleaned.get('password'))
        user.save()
        return redirect('user:登录')
注册视图
6259906c44b2445a339b756d
class get_a_map_result:
    """Thrift-generated result struct for get_a_map().

    Attributes:
     - success: map<string, double> returned by the call
    """
    # (field id, type, name, type args, default) per field
    thrift_spec = (
        (0, TType.MAP, 'success', (TType.STRING,None,TType.DOUBLE,None), None, ),
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        # Fast path: use the C-accelerated decoder when the protocol and
        # transport support it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: hand-rolled field-by-field decoding.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0:
                if ftype == TType.MAP:
                    self.success = {}
                    (_ktype8, _vtype9, _size7 ) = iprot.readMapBegin()
                    for _i11 in xrange(_size7):
                        _key12 = iprot.readString();
                        _val13 = iprot.readDouble();
                        self.success[_key12] = _val13
                    iprot.readMapEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path mirror of read().
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('get_a_map_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.MAP, 0)
            oprot.writeMapBegin(TType.STRING, TType.DOUBLE, len(self.success))
            for kiter14,viter15 in self.success.items():
                oprot.writeString(kiter14)
                oprot.writeDouble(viter15)
            oprot.writeMapEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.success)
        return value

    def __repr__(self):
        L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
Attributes: - success
6259906c097d151d1a2c288b
class EventManager:
    """Coordinates most communication between the game systems.

    Listeners register themselves and receive every posted event through
    their ``notify(event)`` method.  Listeners are held weakly, so
    registering does not keep them alive.

    (Event-driven design after sjbrown's Writing Games Tutorial,
    http://ezide.com/games/writing-games.html)

    :Attributes:
        - *listeners*: weak-keyed mapping used as a weak set of listeners
    """

    def __init__(self):
        from weakref import WeakKeyDictionary
        # WeakKeyDictionary used as a weak set; values are a dummy 1.
        self.listeners = WeakKeyDictionary()

    def register_listener(self, listener):
        """Subscribe *listener* to all future events."""
        self.listeners[listener] = 1

    def unregister_listener(self, listener):
        """Unsubscribe *listener*; a no-op if it is not registered."""
        if listener in self.listeners:
            del self.listeners[listener]

    def post(self, event):
        """Deliver *event* to every registered listener.

        Iterates over a snapshot of the listeners so that a listener may
        register/unregister during notification — and so that weakly-held
        listeners being garbage-collected mid-delivery cannot raise
        "dictionary changed size during iteration".
        """
        for listener in list(self.listeners):
            listener.notify(event)
This class is responsible for coordinating most communication between the game systems. This class and the idea of event driven architecture was taken from `sjbrown's Writing Games Tutorial <http://ezide.com/games/writing-games.html>`_. :Attributes: - *listener* (): registered listeners
6259906c3317a56b869bf151
class AzKVNonRecoverableEventTest(unittest.TestCase): <NEW_LINE> <INDENT> def test_com_bucket_missing(self): <NEW_LINE> <INDENT> record = copy.deepcopy(base_record) <NEW_LINE> record['com'] = None <NEW_LINE> plugin = azkvnonrecoverableevent. AzKVNonRecoverableEvent() <NEW_LINE> events = list(plugin.eval(record)) <NEW_LINE> self.assertEqual(events, []) <NEW_LINE> <DEDENT> def test_com_bucket_cloud_type_non_azure(self): <NEW_LINE> <INDENT> record = copy.deepcopy(base_record) <NEW_LINE> record['com']['cloud_type'] = 'non_azure' <NEW_LINE> plugin = azkvnonrecoverableevent. AzKVNonRecoverableEvent() <NEW_LINE> events = list(plugin.eval(record)) <NEW_LINE> self.assertEqual(events, []) <NEW_LINE> <DEDENT> def test_ext_bucket_missing(self): <NEW_LINE> <INDENT> record = copy.deepcopy(base_record) <NEW_LINE> record['ext'] = None <NEW_LINE> plugin = azkvnonrecoverableevent. AzKVNonRecoverableEvent() <NEW_LINE> events = list(plugin.eval(record)) <NEW_LINE> self.assertEqual(events, []) <NEW_LINE> <DEDENT> def test_ext_bucket_record_type_non_key_vault(self): <NEW_LINE> <INDENT> record = copy.deepcopy(base_record) <NEW_LINE> record['ext']['record_type'] = 'non_key_vault' <NEW_LINE> plugin = azkvnonrecoverableevent. AzKVNonRecoverableEvent() <NEW_LINE> events = list(plugin.eval(record)) <NEW_LINE> self.assertEqual(events, []) <NEW_LINE> <DEDENT> def test_key_vault_recoverable(self): <NEW_LINE> <INDENT> record = copy.deepcopy(base_record) <NEW_LINE> record['ext']['recoverable'] = True <NEW_LINE> plugin = azkvnonrecoverableevent. AzKVNonRecoverableEvent() <NEW_LINE> events = list(plugin.eval(record)) <NEW_LINE> self.assertEqual(events, []) <NEW_LINE> <DEDENT> def test_key_vault_non_recoverable(self): <NEW_LINE> <INDENT> record = copy.deepcopy(base_record) <NEW_LINE> plugin = azkvnonrecoverableevent. 
AzKVNonRecoverableEvent() <NEW_LINE> events = list(plugin.eval(record)) <NEW_LINE> self.assertEqual(len(events), 1) <NEW_LINE> self.assertEqual(events[0]['ext']['record_type'], 'key_vault_non_recoverable_event') <NEW_LINE> self.assertEqual(events[0]['com']['record_type'], 'key_vault_non_recoverable_event')
Tests for AzKVNonRecoverableEvent plugin.
6259906cb7558d5895464b3f
class UserDetailsChanging(UserSettingsFormHandler): <NEW_LINE> <INDENT> form_class = UserDetailsChangingForm <NEW_LINE> form_name = 'user_details_form' <NEW_LINE> redirect_view_name = 'user_settings' <NEW_LINE> success_message = _('changes saved') <NEW_LINE> @method_decorator(login_required) <NEW_LINE> def post(self, request): <NEW_LINE> <INDENT> return super().post(request, data=request.POST, instance=request.user)
A view to handle the form that is in charge of changing the fields of the User model (excluding password).
6259906cd6c5a102081e3946
class Favourites(models.Model): <NEW_LINE> <INDENT> profile = models.ForeignKey(Profile, on_delete=models.CASCADE, null=True) <NEW_LINE> article = models.ForeignKey( Article, related_name="article_id", on_delete=models.CASCADE, null=True) <NEW_LINE> favourite = models.BooleanField(default=False)
field contains id of user who has favourited an article
6259906ce76e3b2f99fda21d
@total_ordering <NEW_LINE> class AgentNode: <NEW_LINE> <INDENT> def __init__(self, agent: Agent, polarity: Polarity) -> None: <NEW_LINE> <INDENT> self.agent = agent <NEW_LINE> self.polarity = polarity <NEW_LINE> <DEDENT> def __str__(self) -> str: <NEW_LINE> <INDENT> return f'{self.agent}({self.polarity.value})' <NEW_LINE> <DEDENT> def __hash__(self) -> int: <NEW_LINE> <INDENT> return hash((self.agent.agent_id, self.polarity)) <NEW_LINE> <DEDENT> def __eq__(self, other: AgentNode) -> bool: <NEW_LINE> <INDENT> return all(self._agent_node_eq_comparison(other)) <NEW_LINE> <DEDENT> def _agent_node_eq_comparison( self, other: AgentNode ) -> Generator[bool, None, None]: <NEW_LINE> <INDENT> yield isinstance(other, self.__class__) <NEW_LINE> yield self.agent == other.agent <NEW_LINE> yield self.polarity == other.polarity <NEW_LINE> <DEDENT> def __lt__(self, other: AgentNode) -> bool: <NEW_LINE> <INDENT> if self.agent == other.agent: <NEW_LINE> <INDENT> return ( self.polarity == Polarity.POSITIVE ) and ( other.polarity == Polarity.NEGATIVE ) <NEW_LINE> <DEDENT> return self.agent < other.agent
Agent nodes split into positive and negative component nodes. An edge is drawn between them when the graph is built.
6259906c4a966d76dd5f0707
class AiNameCulturalContext(object): <NEW_LINE> <INDENT> swagger_types = { 'language': 'str', 'location': 'str', 'script': 'str', 'encoding': 'str', 'style': 'str' } <NEW_LINE> attribute_map = { 'language': 'language', 'location': 'location', 'script': 'script', 'encoding': 'encoding', 'style': 'style' } <NEW_LINE> def __init__(self, language: str = None, location: str = None, script: str = None, encoding: str = None, style: str = None): <NEW_LINE> <INDENT> self._language = None <NEW_LINE> self._location = None <NEW_LINE> self._script = None <NEW_LINE> self._encoding = None <NEW_LINE> self._style = None <NEW_LINE> if language is not None: <NEW_LINE> <INDENT> self.language = language <NEW_LINE> <DEDENT> if location is not None: <NEW_LINE> <INDENT> self.location = location <NEW_LINE> <DEDENT> if script is not None: <NEW_LINE> <INDENT> self.script = script <NEW_LINE> <DEDENT> if encoding is not None: <NEW_LINE> <INDENT> self.encoding = encoding <NEW_LINE> <DEDENT> if style is not None: <NEW_LINE> <INDENT> self.style = style <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def language(self) -> str: <NEW_LINE> <INDENT> return self._language <NEW_LINE> <DEDENT> @language.setter <NEW_LINE> def language(self, language: str): <NEW_LINE> <INDENT> self._language = language <NEW_LINE> <DEDENT> @property <NEW_LINE> def location(self) -> str: <NEW_LINE> <INDENT> return self._location <NEW_LINE> <DEDENT> @location.setter <NEW_LINE> def location(self, location: str): <NEW_LINE> <INDENT> self._location = location <NEW_LINE> <DEDENT> @property <NEW_LINE> def script(self) -> str: <NEW_LINE> <INDENT> return self._script <NEW_LINE> <DEDENT> @script.setter <NEW_LINE> def script(self, script: str): <NEW_LINE> <INDENT> self._script = script <NEW_LINE> <DEDENT> @property <NEW_LINE> def encoding(self) -> str: <NEW_LINE> <INDENT> return self._encoding <NEW_LINE> <DEDENT> @encoding.setter <NEW_LINE> def encoding(self, encoding: str): <NEW_LINE> <INDENT> self._encoding = encoding <NEW_LINE> 
<DEDENT> @property <NEW_LINE> def style(self) -> str: <NEW_LINE> <INDENT> return self._style <NEW_LINE> <DEDENT> @style.setter <NEW_LINE> def style(self, style: str): <NEW_LINE> <INDENT> if style is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `style`, must not be `None`") <NEW_LINE> <DEDENT> self._style = style <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, AiNameCulturalContext): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
AiName cultural context
6259906c01c39578d7f14343
class PauseTask(Routine): <NEW_LINE> <INDENT> def handle(self, scheduler, task): <NEW_LINE> <INDENT> log.debug("Pausing task %r.", task) <NEW_LINE> return True
Schedule the task to resume later through explicit resume.
6259906c4527f215b58eb5ae
class Metadata(QuantumAPIDictWrapper): <NEW_LINE> <INDENT> _attrs = ['name', 'id', 'value', 'image_map_id'] <NEW_LINE> def __init__(self, apiresource): <NEW_LINE> <INDENT> super(Metadata, self).__init__(apiresource)
Wrapper for quantum metadata
6259906c91f36d47f2231a9d
class Zone(Resource): <NEW_LINE> <INDENT> _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, 'location': {'required': True}, 'max_number_of_record_sets': {'readonly': True}, 'number_of_record_sets': {'readonly': True}, 'name_servers': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'location': {'key': 'location', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'etag': {'key': 'etag', 'type': 'str'}, 'max_number_of_record_sets': {'key': 'properties.maxNumberOfRecordSets', 'type': 'long'}, 'number_of_record_sets': {'key': 'properties.numberOfRecordSets', 'type': 'long'}, 'name_servers': {'key': 'properties.nameServers', 'type': '[str]'}, } <NEW_LINE> def __init__(self, location, tags=None, etag=None): <NEW_LINE> <INDENT> super(Zone, self).__init__(location=location, tags=tags) <NEW_LINE> self.etag = etag <NEW_LINE> self.max_number_of_record_sets = None <NEW_LINE> self.number_of_record_sets = None <NEW_LINE> self.name_servers = None
Describes a DNS zone. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Resource ID. :vartype id: str :ivar name: Resource name. :vartype name: str :ivar type: Resource type. :vartype type: str :param location: Resource location. :type location: str :param tags: Resource tags. :type tags: dict[str, str] :param etag: The etag of the zone. :type etag: str :ivar max_number_of_record_sets: The maximum number of record sets that can be created in this DNS zone. This is a read-only property and any attempt to set this value will be ignored. :vartype max_number_of_record_sets: long :ivar number_of_record_sets: The current number of record sets in this DNS zone. This is a read-only property and any attempt to set this value will be ignored. :vartype number_of_record_sets: long :ivar name_servers: The name servers for this DNS zone. This is a read-only property and any attempt to set this value will be ignored. :vartype name_servers: list[str]
6259906c460517430c432c64
class Message(RawMessage): <NEW_LINE> <INDENT> def encode(self, value): <NEW_LINE> <INDENT> return base64.b64encode(value) <NEW_LINE> <DEDENT> def decode(self, value): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> value = base64.b64decode(value) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> raise SQSDecodeError('Unable to decode message', self) <NEW_LINE> <DEDENT> return value
The default Message class used for SQS queues. This class automatically encodes/decodes the message body using Base64 encoding to avoid any illegal characters in the message body. See: http://developer.amazonwebservices.com/connect/thread.jspa?messageID=49680 for details on why this is a good idea. The encode/decode is meant to be transparent to the end-user.
6259906ce5267d203ee6cfcc
class Policy(object): <NEW_LINE> <INDENT> __metaclass__ = abc.ABCMeta <NEW_LINE> @abc.abstractmethod <NEW_LINE> def reset(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def restore(self, checkpoint): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def sample_action(self, obs, explore_prob): <NEW_LINE> <INDENT> raise NotImplementedError
Base policy abstraction. Subclasses should implement `reset` and `sample_action` methods to ensure compatibility with the train_collect_eval function.
6259906c0c0af96317c5796d
class Dog(Animal): <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> self.name = name
This is a Dog class
6259906cac7a0e7691f73d04
class ProxyRequest(http.Request): <NEW_LINE> <INDENT> protocols = {'http': ProxyClientFactory} <NEW_LINE> ports = {'http': 80} <NEW_LINE> def process(self): <NEW_LINE> <INDENT> parsed = urlparse.urlparse(self.uri) <NEW_LINE> protocol = parsed[0] <NEW_LINE> host = parsed[1] <NEW_LINE> port = self.ports[protocol] <NEW_LINE> if ':' in host: <NEW_LINE> <INDENT> host, port = host.split(':') <NEW_LINE> port = int(port) <NEW_LINE> <DEDENT> rest = urlparse.urlunparse(('','')+parsed[2:]) <NEW_LINE> if not rest: <NEW_LINE> <INDENT> rest = rest+'/' <NEW_LINE> <DEDENT> class_ = self.protocols[protocol] <NEW_LINE> headers = self.getAllHeaders().copy() <NEW_LINE> if not headers.has_key('host'): <NEW_LINE> <INDENT> headers['host'] = host <NEW_LINE> <DEDENT> self.content.seek(0, 0) <NEW_LINE> s = self.content.read() <NEW_LINE> clientFactory = class_(self.method, rest, self.clientproto, headers, s, self) <NEW_LINE> reactor.connectTCP(host, port, clientFactory)
Used by Proxy to implement a simple web proxy.
6259906c4c3428357761bad0
class LifecycleManager: <NEW_LINE> <INDENT> configManager = None <NEW_LINE> toreeManager = None <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.configManager = ConfigManager() <NEW_LINE> self.toreeManager = ToreeManager() <NEW_LINE> <DEDENT> def _reserve_profile(self): <NEW_LINE> <INDENT> profilesFolder = self.configManager.getProfilesFolder() <NEW_LINE> profile = None <NEW_LINE> for (path, dirs, files) in os.walk(profilesFolder): <NEW_LINE> <INDENT> for folderName in dirs: <NEW_LINE> <INDENT> profile = Profile(profilesFolder + '/' + folderName) <NEW_LINE> if profile.isAvailable(): <NEW_LINE> <INDENT> profile.reserve() <NEW_LINE> break <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> profile = None <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> """Unlock the mutex enabling other processes to select same kernel config""" <NEW_LINE> return profile <NEW_LINE> <DEDENT> def _release_profile(self, profile): <NEW_LINE> <INDENT> profile.release() <NEW_LINE> <DEDENT> def start_toree(self): <NEW_LINE> <INDENT> profile = self._reserve_profile() <NEW_LINE> if profile is None: <NEW_LINE> <INDENT> raise RuntimeError('No Toree slot available.') <NEW_LINE> <DEDENT> self.toreeManager.start_toree(profile) <NEW_LINE> return profile <NEW_LINE> <DEDENT> def stop_toree(self, profile): <NEW_LINE> <INDENT> self.toreeManager.stop_toree(profile) <NEW_LINE> self._release_profile(profile)
An orchestrator for the Toree lifecycle which selects an available Toree slot, reserves it and starts/stops it when notebooks are started/stopped. Open slots are identified by not having a toree.pid. In case of corruption or a requirement to kill the Toree process, one should also remove the toree.pid from the specific Toree slot.
6259906c71ff763f4b5e8fc4
class Voc: <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.trimmed = False <NEW_LINE> self.word2index = {} <NEW_LINE> self.word2count = {} <NEW_LINE> self.index2word = { PAD_token: "PAD", SOS_token: "SOS", EOS_token: "EOS"} <NEW_LINE> self.num_words = 3 <NEW_LINE> <DEDENT> def addSentence(self, sentence): <NEW_LINE> <INDENT> for word in sentence.split(' '): <NEW_LINE> <INDENT> self.addWord(word) <NEW_LINE> <DEDENT> <DEDENT> def addWord(self, word): <NEW_LINE> <INDENT> if word not in self.word2index: <NEW_LINE> <INDENT> self.word2index[word] = self.num_words <NEW_LINE> self.word2count[word] = 1 <NEW_LINE> self.index2word[self.num_words] = word <NEW_LINE> self.num_words += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.word2count[word] += 1 <NEW_LINE> <DEDENT> <DEDENT> def trim(self, min_count): <NEW_LINE> <INDENT> if self.trimmed: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.trimmed = True <NEW_LINE> keep_words = [] <NEW_LINE> for k, v in self.word2count.items(): <NEW_LINE> <INDENT> if v >= min_count: <NEW_LINE> <INDENT> keep_words.append(k) <NEW_LINE> <DEDENT> <DEDENT> print('keep_words {} / {} = {:.4f}'.format(len(keep_words), len(self.word2index), len(keep_words) / len( self.word2index))) <NEW_LINE> self.word2index = {} <NEW_LINE> self.word2count = {} <NEW_LINE> self.index2word = { PAD_token: "PAD", SOS_token: "SOS", EOS_token: "EOS"} <NEW_LINE> self.num_words = 3 <NEW_LINE> for word in keep_words: <NEW_LINE> <INDENT> self.addWord(word)
dictionary class
6259906cf548e778e596cda9
class WidgetContainerListContentsMixin(object): <NEW_LINE> <INDENT> def __iter__(self): <NEW_LINE> <INDENT> return xrange(len(self.contents)) <NEW_LINE> <DEDENT> def __reversed__(self): <NEW_LINE> <INDENT> return xrange(len(self.contents) - 1, -1, -1)
Mixin class for widget containers whose positions are indexes into a list available as self.contents.
6259906c8a43f66fc4bf39b0
class Duplex(Enum, IComparable, IFormattable, IConvertible): <NEW_LINE> <INDENT> def __eq__(self, *args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __format__(self, *args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __ge__(self, *args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __gt__(self, *args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __init__(self, *args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __le__(self, *args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __lt__(self, *args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __ne__(self, *args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __reduce_ex__(self, *args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __str__(self, *args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> Default = None <NEW_LINE> Horizontal = None <NEW_LINE> Simplex = None <NEW_LINE> value__ = None <NEW_LINE> Vertical = None
Specifies the printer's duplex setting. enum Duplex, values: Default (-1), Horizontal (3), Simplex (1), Vertical (2)
6259906c63d6d428bbee3e98
class Tag(models.Model): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> ordering = ['name'] <NEW_LINE> verbose_name = _('tag') <NEW_LINE> verbose_name_plural = _('tags') <NEW_LINE> <DEDENT> NAME_LENGTH = 64 <NEW_LINE> name = models.CharField(max_length=NAME_LENGTH, db_index=True, unique=True, blank=False) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def __bool__(self): <NEW_LINE> <INDENT> return bool(self.name) <NEW_LINE> <DEDENT> @property <NEW_LINE> def entries_count(self): <NEW_LINE> <INDENT> return self.versions.count('entry_id', distinct=True) <NEW_LINE> <DEDENT> @property <NEW_LINE> def events_count(self): <NEW_LINE> <INDENT> return self.events.count() <NEW_LINE> <DEDENT> def save(self, force_insert=False, force_update=False, using=None, update_fields=None): <NEW_LINE> <INDENT> self.name = self.name.strip()[:self.NAME_LENGTH].lower() <NEW_LINE> if self.name: <NEW_LINE> <INDENT> self.name = bleach.clean(self.name, tags=[], strip=True) <NEW_LINE> super().save(force_insert, force_update, using, update_fields) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if self.pk: <NEW_LINE> <INDENT> self.delete() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def as_link(self): <NEW_LINE> <INDENT> from palanaeum.search import TagSearchFilter <NEW_LINE> url = reverse('advanced_search') + '?' 
+ urlencode({ TagSearchFilter.GET_TAG_SEARCH: self.name }) <NEW_LINE> return "<a href='{}'>#{}</a>".format(url, self.name) <NEW_LINE> <DEDENT> def as_selected_option(self): <NEW_LINE> <INDENT> return "<option value='{0}' selected='selected'>{0} ({1})</option>".format(escape(self.name), self.get_usage_count()) <NEW_LINE> <DEDENT> TAG_SANITIZER = re.compile(r"[^A-Za-z0-9'\-_\s]") <NEW_LINE> @staticmethod <NEW_LINE> def get_tag(name: str): <NEW_LINE> <INDENT> if not name: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> name = name.strip().lower() <NEW_LINE> name = Tag.TAG_SANITIZER.sub("", name) <NEW_LINE> name = " ".join(name.split()) <NEW_LINE> name = name[:Tag.NAME_LENGTH] <NEW_LINE> try: <NEW_LINE> <INDENT> return Tag.objects.get(name__iexact=name) <NEW_LINE> <DEDENT> except Tag.DoesNotExist: <NEW_LINE> <INDENT> request = get_request() <NEW_LINE> if request.user.is_staff: <NEW_LINE> <INDENT> return Tag.objects.create(name=name) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise PermissionDenied() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def clean_unused(): <NEW_LINE> <INDENT> return Tag.objects.filter(events=None).filter(versions=None).delete() <NEW_LINE> <DEDENT> def get_usage_count(self): <NEW_LINE> <INDENT> cache = caches['default'] <NEW_LINE> cached_stats = cache.get('tag_usage_stats') <NEW_LINE> if cached_stats and self.pk in cached_stats: <NEW_LINE> <INDENT> return cached_stats[self.pk] <NEW_LINE> <DEDENT> stats = Tag.objects.annotate(events_count=Count('events'), entries_count=Count('versions__entry_id', distinct=True)) .values_list('id', 'events_count', 'entries_count') <NEW_LINE> stats = {s[0]: s[1] + s[2] for s in stats} <NEW_LINE> cache.set('tag_usage_stats', stats) <NEW_LINE> return stats.get(self.id, 0)
A single tag consists of a series of letters, without any space between them. Tags are case insensitive.
6259906c76e4537e8c3f0da0
class ChefClientRunner(ChefRunner): <NEW_LINE> <INDENT> cmdline_options = [ ('-r', '--rewrite_runlist', {}) ] <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super(ChefClientRunner, self).__init__() <NEW_LINE> self.cmdline_options += ChefRunner.cmdline_options <NEW_LINE> self.chef_binary = 'chef-client'
ChefRunner type implementation. Invokes chef-client binary with given arguments.
6259906c99fddb7c1ca639df
class Page: <NEW_LINE> <INDENT> request = None <NEW_LINE> def __init__(self, name, title, text): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.title = title <NEW_LINE> self.text = text <NEW_LINE> self.context = {} <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def parse(cls, name, input): <NEW_LINE> <INDENT> if isinstance(input, str): <NEW_LINE> <INDENT> input = StringIO(input) <NEW_LINE> <DEDENT> message = Parser().parse(input) <NEW_LINE> title = message["title"] <NEW_LINE> text = message.get_payload() <NEW_LINE> return Page(name, title, text) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def content_root(cls): <NEW_LINE> <INDENT> result = getattr(settings, "ABOUT_CONTENT_ROOT", None) <NEW_LINE> if result: <NEW_LINE> <INDENT> return Path(result) <NEW_LINE> <DEDENT> config = apps.get_app_config("about") <NEW_LINE> return Path(config.path) / "content" <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def find_with_name(cls, name): <NEW_LINE> <INDENT> path = cls.content_root() / ((name or "index") + ".mmd") <NEW_LINE> with open(path, "r", encoding="UTF-8") as f: <NEW_LINE> <INDENT> return cls.parse(name, f) <NEW_LINE> <DEDENT> <DEDENT> def set_context(self, context, request=None): <NEW_LINE> <INDENT> self.context.update(context) <NEW_LINE> if request is not None: <NEW_LINE> <INDENT> self.request = request <NEW_LINE> <DEDENT> <DEDENT> @mark_safe <NEW_LINE> def formatted(self): <NEW_LINE> <INDENT> django_engine = engines["django"] <NEW_LINE> template = django_engine.from_string(self.text) <NEW_LINE> text = template.render(self.context) <NEW_LINE> formatted = markdown.markdown(text, output="HTML5") <NEW_LINE> pos = formatted.find("</h1>") <NEW_LINE> if pos >= 0: <NEW_LINE> <INDENT> formatted = ( formatted[: pos + 5] + "\n<div>" + formatted[pos + 5 :] + "\n</div>" ) <NEW_LINE> <DEDENT> return formatted
One page of text that can be displayed.
6259906c16aa5153ce401cf7
class TailCommand(Command): <NEW_LINE> <INDENT> def __init__(self, name='tail', topic='shell', brief='display the last lines of a file', **kwargs): <NEW_LINE> <INDENT> self.parser = PypsiArgParser( prog=name, description=brief, usage=TailCmdUsage ) <NEW_LINE> self.parser.add_argument( 'input_file', help='file to display', metavar="FILE", completer=self.complete_path ) <NEW_LINE> self.parser.add_argument( '-n', '--lines', metavar="N", type=int, default=10, help="number of lines to display" ) <NEW_LINE> self.parser.add_argument( '-f', '--follow', help="continue to output as file grows", action='store_true' ) <NEW_LINE> super().__init__( name=name, usage=self.parser.format_help(), topic=topic, brief=brief, **kwargs ) <NEW_LINE> <DEDENT> def run(self, shell, args): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> ns = self.parser.parse_args(args) <NEW_LINE> <DEDENT> except CommandShortCircuit as e: <NEW_LINE> <INDENT> return e.code <NEW_LINE> <DEDENT> if not os.path.isfile(ns.input_file): <NEW_LINE> <INDENT> self.error(shell, "invalid file path: ", ns.input_file, "\n") <NEW_LINE> return -1 <NEW_LINE> <DEDENT> last_lines = self.tail(ns.input_file, ns.lines) <NEW_LINE> for line in last_lines: <NEW_LINE> <INDENT> print(line) <NEW_LINE> <DEDENT> print() <NEW_LINE> if ns.follow: <NEW_LINE> <INDENT> self.follow_file(ns.input_file) <NEW_LINE> <DEDENT> return 0 <NEW_LINE> <DEDENT> def complete_path(self, shell, args, prefix): <NEW_LINE> <INDENT> return path_completer(args[-1], prefix=prefix) <NEW_LINE> <DEDENT> def complete(self, shell, args, prefix): <NEW_LINE> <INDENT> return command_completer(self.parser, shell, args, prefix) <NEW_LINE> <DEDENT> def tail(self, fname, lines=10, block_size=1024): <NEW_LINE> <INDENT> data = [] <NEW_LINE> blocks = -1 <NEW_LINE> num_lines = 0 <NEW_LINE> with safe_open(fname, 'r') as fp: <NEW_LINE> <INDENT> fp.seek(0, 2) <NEW_LINE> num_bytes = fp.tell() <NEW_LINE> bytes_left = num_bytes <NEW_LINE> while num_lines < lines and bytes_left > 0: 
<NEW_LINE> <INDENT> if bytes_left - block_size > 0: <NEW_LINE> <INDENT> fp.seek(num_bytes - (blocks * block_size)) <NEW_LINE> data.insert(0, fp.read(block_size)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> fp.seek(0, 0) <NEW_LINE> data.insert(0, fp.read(num_bytes)) <NEW_LINE> <DEDENT> num_lines = data[0].count('\n') <NEW_LINE> bytes_left -= block_size <NEW_LINE> blocks -= 1 <NEW_LINE> <DEDENT> return ''.join(data).splitlines()[-lines:] <NEW_LINE> <DEDENT> <DEDENT> def follow_file(self, fname): <NEW_LINE> <INDENT> with safe_open(fname, 'r') as fp: <NEW_LINE> <INDENT> fp.seek(0, 2) <NEW_LINE> try: <NEW_LINE> <INDENT> while 1: <NEW_LINE> <INDENT> where = fp.tell() <NEW_LINE> line = fp.readline() <NEW_LINE> if not line: <NEW_LINE> <INDENT> time.sleep(1) <NEW_LINE> fp.seek(where) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print(line, end='', flush=True) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except KeyboardInterrupt: <NEW_LINE> <INDENT> print() <NEW_LINE> return 0 <NEW_LINE> <DEDENT> <DEDENT> return 0
Displays the last N lines of a file to the screen.
6259906c3317a56b869bf152
class LogisticAT(base.BaseEstimator): <NEW_LINE> <INDENT> def __init__(self, alpha=1., verbose=0, max_iter=1000): <NEW_LINE> <INDENT> self.alpha = alpha <NEW_LINE> self.verbose = verbose <NEW_LINE> self.max_iter = max_iter <NEW_LINE> <DEDENT> def fit(self, X, y): <NEW_LINE> <INDENT> _y = np.array(y).astype(np.int) <NEW_LINE> if np.abs(_y - y).sum() > 0.1: <NEW_LINE> <INDENT> raise ValueError('y must only contain integer values') <NEW_LINE> <DEDENT> self.classes_ = np.unique(y) <NEW_LINE> self.n_class_ = self.classes_.max() - self.classes_.min() + 1 <NEW_LINE> y_tmp = y - y.min() <NEW_LINE> self.coef_, self.theta_ = threshold_fit( X, y_tmp, self.alpha, self.n_class_, mode='AE', verbose=self.verbose, max_iter=self.max_iter) <NEW_LINE> return self <NEW_LINE> <DEDENT> def predict(self, X): <NEW_LINE> <INDENT> return threshold_predict(X, self.coef_, self.theta_) + self.classes_.min() <NEW_LINE> <DEDENT> def score(self, X, y): <NEW_LINE> <INDENT> pred = self.predict(X) <NEW_LINE> return - metrics.mean_absolute_error(pred, y)
Classifier that implements the ordinal logistic model (All-Threshold variant) Parameters ---------- alpha: float Regularization parameter. Zero is no regularization, higher values increase the squared l2 regularization. References ---------- J. D. M. Rennie and N. Srebro, "Loss Functions for Preference Levels : Regression with Discrete Ordered Labels," in Proceedings of the IJCAI Multidisciplinary Workshop on Advances in Preference Handling, 2005.
6259906c3346ee7daa33826d
class GetManager(generics.RetrieveAPIView): <NEW_LINE> <INDENT> queryset = Manager.objects.all() <NEW_LINE> serializer_class = MangerSerializer
Retrieve a particular Manager by its id
6259906ce1aae11d1e7cf41c
class TankAnt(BodyguardAnt): <NEW_LINE> <INDENT> name = 'Tank' <NEW_LINE> damage = 1 <NEW_LINE> food_cost = 6 <NEW_LINE> implemented = True <NEW_LINE> def action(self, colony): <NEW_LINE> <INDENT> BodyguardAnt.action(self, colony) <NEW_LINE> for bee in self.place.bees[:]: <NEW_LINE> <INDENT> bee.reduce_armor(TankAnt.damage)
TankAnt provides both offensive and defensive capabilities.
6259906c3cc13d1c6d466f64
class AbakusTask(celery.Task): <NEW_LINE> <INDENT> def apply_async(self, args=None, kwargs=None, *arguments, **keyword_arguments): <NEW_LINE> <INDENT> logger = log.bind() <NEW_LINE> logger_context = dict(get_context(logger)._dict) <NEW_LINE> kwargs["logger_context"] = logger_context <NEW_LINE> async_result = super().apply_async( args, kwargs, *arguments, **keyword_arguments ) <NEW_LINE> log.info("async_task_created", task_id=async_result.id, task_name=self.name) <NEW_LINE> return async_result <NEW_LINE> <DEDENT> def setup_logger(self, logger_context): <NEW_LINE> <INDENT> logger_context = logger_context or {} <NEW_LINE> logger_context["task_name"] = self.name <NEW_LINE> logger_context["task_id"] = self.request.id <NEW_LINE> self.logger = log.new(**logger_context)
This base task supplies the logger_context to the underlying worker. > @celery_app.task(bind=True, base=AbakusTask) > def task_name(self, logger_context=None): > self.setup_logger(logger_context) > other work...
6259906c5fc7496912d48e77
class FPSBox(overlay.TextOverlayElement): <NEW_LINE> <INDENT> def __init__(self, hud, pos): <NEW_LINE> <INDENT> super().__init__(None, "one million pounds", pos, Color(0,255,0,0)) <NEW_LINE> self.__counter = 0 <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> if self.__counter == 0: <NEW_LINE> <INDENT> self.__counter = 50 <NEW_LINE> self.setLabel(str(Engine.getFPS())) <NEW_LINE> <DEDENT> self.__counter -= 1
Shows the current framerate
6259906c92d797404e38976a
class LambdaTransition(Transition): <NEW_LINE> <INDENT> def __init__(self, messages): <NEW_LINE> <INDENT> super(LambdaTransition, self).__init__(messages, None)
Transition subclass that does not change the user's state list in any way. Just sends messages.
6259906c4428ac0f6e659d51
@skipIf(NO_MOCK, NO_MOCK_REASON) <NEW_LINE> class RsyncTestCase(TestCase): <NEW_LINE> <INDENT> def test_rsync(self): <NEW_LINE> <INDENT> with patch.dict(rsync.__salt__, {'config.option': MagicMock(return_value=False)}): <NEW_LINE> <INDENT> self.assertRaises(CommandExecutionError, rsync.rsync, False, False) <NEW_LINE> <DEDENT> with patch.dict(rsync.__salt__, {'config.option': MagicMock(return_value='A'), 'cmd.run_all': MagicMock(side_effect=[IOError('f'), 'A'])}): <NEW_LINE> <INDENT> with patch.object(rsync, '_check', return_value='A'): <NEW_LINE> <INDENT> self.assertRaises(CommandExecutionError, rsync.rsync, 'a', 'b') <NEW_LINE> self.assertEqual(rsync.rsync('src', 'dst'), 'A') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def test_version(self): <NEW_LINE> <INDENT> mock = MagicMock(side_effect=[IOError('f'), {'stdout': 'A B C\n'}]) <NEW_LINE> with patch.dict(rsync.__salt__, {'cmd.run_all': mock}): <NEW_LINE> <INDENT> self.assertRaises(CommandExecutionError, rsync.version) <NEW_LINE> self.assertEqual(rsync.version(), {'stdout': 'C'}) <NEW_LINE> <DEDENT> <DEDENT> def test_config(self): <NEW_LINE> <INDENT> mock_file = MagicMock(side_effect=[False, True, True]) <NEW_LINE> with patch.object(os.path, 'isfile', mock_file): <NEW_LINE> <INDENT> self.assertRaises(CommandExecutionError, rsync.config) <NEW_LINE> mock = MagicMock(side_effect=[IOError('f'), 'A']) <NEW_LINE> with patch.dict(rsync.__salt__, {'cmd.run_all': mock}): <NEW_LINE> <INDENT> self.assertRaises(CommandExecutionError, rsync.config) <NEW_LINE> self.assertEqual(rsync.config('confile'), 'A')
Test cases for salt.modules.rsync
6259906c4e4d562566373c25
class Documents(RTC_Client): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def get_by_date(cls, date, make_obj=False, sections=''): <NEW_LINE> <INDENT> endpoint = "documents.json" <NEW_LINE> begin_time = '%sT00:00:00' % date <NEW_LINE> end_time = '%sT23:59:59' % date <NEW_LINE> params = {'posted_at__gte':begin_time, 'posted_at__lte':end_time} <NEW_LINE> result = super(Documents, cls)._apicall(endpoint, sections, make_obj, **params) <NEW_LINE> return result['documents']
Holds links to various kinds of documents produced by agencies within Congress, or agencies outside of Congress regarding legislation and related matters. This collection currently contains only one kind of document: whip_notice - Daily and weekly digests of chamber schedules published by House leadership of each party.
6259906c7d847024c075dbfa
class RotateCanvasCCW(Operator): <NEW_LINE> <INDENT> bl_idname = "artist_paint.rotate_ccw_90" <NEW_LINE> bl_label = "Canvas Rotate CounterClockwise 90" <NEW_LINE> bl_options = {'REGISTER','UNDO'} <NEW_LINE> @classmethod <NEW_LINE> def poll(self, context): <NEW_LINE> <INDENT> obj = context.active_object <NEW_LINE> if obj is not None: <NEW_LINE> <INDENT> A = context.active_object.type == 'MESH' <NEW_LINE> B = pollAPT(self, context) <NEW_LINE> return A and B <NEW_LINE> <DEDENT> <DEDENT> def execute(self, context): <NEW_LINE> <INDENT> _bool01 = context.scene.canvas_in_frame <NEW_LINE> obj = context.active_object <NEW_LINE> _obName = obj.name <NEW_LINE> _camName = "Camera_" + _obName <NEW_LINE> select_mat = obj.data.materials[0].texture_slots[0]. texture.image.size[:] <NEW_LINE> if select_mat[0] >= select_mat[1]: <NEW_LINE> <INDENT> camRatio = select_mat[0]/select_mat[1] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> camRatio = select_mat[1]/select_mat[0] <NEW_LINE> <DEDENT> rnd = context.scene.render <NEW_LINE> if rnd.resolution_x==select_mat[0]: <NEW_LINE> <INDENT> rnd.resolution_x= select_mat[1] <NEW_LINE> rnd.resolution_y= select_mat[0] <NEW_LINE> <DEDENT> elif rnd.resolution_x==select_mat[1]: <NEW_LINE> <INDENT> rnd.resolution_x= select_mat[0] <NEW_LINE> rnd.resolution_y= select_mat[1] <NEW_LINE> <DEDENT> bpy.ops.paint.texture_paint_toggle() <NEW_LINE> bpy.ops.transform.rotate(value=1.5708, axis=(0, 0, 1), constraint_axis=(False, False, True), constraint_orientation='GLOBAL', mirror=False, proportional='DISABLED', proportional_edit_falloff='SMOOTH', proportional_size=1) <NEW_LINE> if _bool01 == True: <NEW_LINE> <INDENT> bpy.ops.view3d.camera_to_view_selected() <NEW_LINE> <DEDENT> bpy.ops.object.select_all(action='TOGGLE') <NEW_LINE> bpy.ops.object.select_all(action='DESELECT') <NEW_LINE> ob = bpy.data.objects[_obName] <NEW_LINE> ob.select = True <NEW_LINE> context.scene.objects.active = ob <NEW_LINE> bpy.ops.paint.texture_paint_toggle() <NEW_LINE> return {'FINISHED'}
Image Rotate CounterClockwise 90 Macro
6259906c2c8b7c6e89bd5004
class BaseModelMeta(metatools.MetaClass): <NEW_LINE> <INDENT> def __new__(mcs, name, bases, namespace): <NEW_LINE> <INDENT> decl_refs = namespace.get('_refs', []) <NEW_LINE> refs = {} <NEW_LINE> for ref in decl_refs: <NEW_LINE> <INDENT> refs[ref.field] = ref <NEW_LINE> <DEDENT> namespace['_refs'] = refs <NEW_LINE> namespace.setdefault('_fields', set()) <NEW_LINE> for base in mcs.iter_bases(bases): <NEW_LINE> <INDENT> mcs.inherit_set(base, namespace, '_fields') <NEW_LINE> mcs.inherit_dict(base, namespace, '_refs') <NEW_LINE> <DEDENT> return super(BaseModelMeta, mcs).__new__(mcs, name, bases, namespace)
Metaclass for class BaseModel. Uses the metatools package to allow for inheritance of field names, and translates lists of references into the dictionary needed by BaseModel.__getattr__().
6259906ca8370b77170f1be5
class SplitDateTimeField(forms.SplitDateTimeField): <NEW_LINE> <INDENT> def __init__(self, input_date_formats=None, input_time_formats=None, **kwargs): <NEW_LINE> <INDENT> input_date_formats = input_date_formats or DEFAULT_DATE_INPUT_FORMATS <NEW_LINE> super().__init__(input_date_formats=input_date_formats, input_time_formats=input_time_formats, **kwargs)
Split date and time input fields which use non-US date and time input formats by default.
6259906c167d2b6e312b819d
class TradeEnv(object): <NEW_LINE> <INDENT> defaults = { 'debug': 0, 'detail': 0, 'quiet': 0, 'dataDir': './data', } <NEW_LINE> def __init__(self, properties=None, **kwargs): <NEW_LINE> <INDENT> properties = properties or dict() <NEW_LINE> self.__dict__.update(self.defaults) <NEW_LINE> if properties: <NEW_LINE> <INDENT> self.__dict__.update(properties.__dict__) <NEW_LINE> <DEDENT> if kwargs: <NEW_LINE> <INDENT> self.__dict__.update(kwargs) <NEW_LINE> <DEDENT> <DEDENT> def __getattr__(self, key): <NEW_LINE> <INDENT> if key.startswith("DEBUG"): <NEW_LINE> <INDENT> def __DEBUG_ENABLED(outText, *args, **kwargs): <NEW_LINE> <INDENT> print('#', outText.format(*args, **kwargs)) <NEW_LINE> <DEDENT> def __DEBUG_DISABLED(*args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> debugLevel = int(key[5:]) <NEW_LINE> if self.debug > debugLevel: <NEW_LINE> <INDENT> debugFn = __DEBUG_ENABLED <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> debugFn = __DEBUG_DISABLED <NEW_LINE> <DEDENT> setattr(self, key, debugFn) <NEW_LINE> return debugFn <NEW_LINE> <DEDENT> if key == "NOTE": <NEW_LINE> <INDENT> def __NOTE_ENABLED(outText, *args, file=None, **kwargs): <NEW_LINE> <INDENT> print("NOTE:", outText.format(*args, **kwargs), file=file) <NEW_LINE> <DEDENT> def __NOTE_DISABLED(*args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> if not self.quiet: <NEW_LINE> <INDENT> noteFn = __NOTE_ENABLED <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> noteFn = __NOTE_DISABLED <NEW_LINE> <DEDENT> setattr(self, key, noteFn) <NEW_LINE> return noteFn <NEW_LINE> <DEDENT> if key == "WARN": <NEW_LINE> <INDENT> def _WARN_ENABLED(outText, *args, file=None, **kwargs): <NEW_LINE> <INDENT> print("WARNING:", outText.format(*args, **kwargs), file=file) <NEW_LINE> <DEDENT> def _WARN_DISABLED(*args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> noteFn = _WARN_DISABLED if self.quiet > 1 else _WARN_ENABLED <NEW_LINE> setattr(self, key, noteFn) <NEW_LINE> return noteFn <NEW_LINE> <DEDENT> return None
Container for a TradeDangerous "environment", which is a collection of operational parameters. To print debug lines, use DEBUG<N>, e.g. DEBUG0, which takes a format() string and parameters, e.g. DEBUG1("hello, {world}{}", "!", world="world") is equivalent to: if tdenv.debug >= 1: print("#hello, {world}{}".format( "!", world="world" )) Use "NOTE" to print remarks which can be disabled with -q.
6259906c4c3428357761bad2
class Genre(models.Model): <NEW_LINE> <INDENT> name = models.CharField( max_length=200, help_text='Enter a book genre (eg. Classic)') <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name
Model representing a book genre.
6259906c71ff763f4b5e8fc6
class AzureFirewallNetworkRuleCollection(SubResource): <NEW_LINE> <INDENT> _validation = { 'etag': {'readonly': True}, 'priority': {'maximum': 65000, 'minimum': 100}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, 'priority': {'key': 'properties.priority', 'type': 'int'}, 'action': {'key': 'properties.action', 'type': 'AzureFirewallRCAction'}, 'rules': {'key': 'properties.rules', 'type': '[AzureFirewallNetworkRule]'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(AzureFirewallNetworkRuleCollection, self).__init__(**kwargs) <NEW_LINE> self.name = kwargs.get('name', None) <NEW_LINE> self.etag = None <NEW_LINE> self.priority = kwargs.get('priority', None) <NEW_LINE> self.action = kwargs.get('action', None) <NEW_LINE> self.rules = kwargs.get('rules', None) <NEW_LINE> self.provisioning_state = kwargs.get('provisioning_state', None)
Network rule collection resource. Variables are only populated by the server, and will be ignored when sending a request. :param id: Resource ID. :type id: str :param name: Gets name of the resource that is unique within a resource group. This name can be used to access the resource. :type name: str :ivar etag: Gets a unique read-only string that changes whenever the resource is updated. :vartype etag: str :param priority: Priority of the network rule collection resource. :type priority: int :param action: The action type of a rule collection. :type action: ~azure.mgmt.network.v2018_07_01.models.AzureFirewallRCAction :param rules: Collection of rules used by a network rule collection. :type rules: list[~azure.mgmt.network.v2018_07_01.models.AzureFirewallNetworkRule] :param provisioning_state: The provisioning state of the resource. Possible values include: "Succeeded", "Updating", "Deleting", "Failed". :type provisioning_state: str or ~azure.mgmt.network.v2018_07_01.models.ProvisioningState
6259906c2c8b7c6e89bd5005
class UnknownApiError(SpinRewriterApiError): <NEW_LINE> <INDENT> def __str__(self): <NEW_LINE> <INDENT> return (u'Unrecognized API error message received: {}'.format( self.api_error_msg))
Raised when API call results in an unrecognized error.
6259906ca219f33f346c8027
class Spellcheck(object): <NEW_LINE> <INDENT> sentence_break_regex = re.compile(r'(^|[.:;!?])\s*$') <NEW_LINE> _enchant_checkers = {} <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def _init_checker(self, lang='en_US'): <NEW_LINE> <INDENT> if lang == 'C': <NEW_LINE> <INDENT> lang = 'en_US' <NEW_LINE> <DEDENT> if not ENCHANT: <NEW_LINE> <INDENT> print_warning('(none): W: unable to init enchant, spellchecking disabled.') <NEW_LINE> return <NEW_LINE> <DEDENT> broker = Broker() <NEW_LINE> if not broker.dict_exists(lang): <NEW_LINE> <INDENT> print_warning(f'(none): W: unable to load spellchecking dictionary for {lang}.') <NEW_LINE> return <NEW_LINE> <DEDENT> if lang not in self._enchant_checkers: <NEW_LINE> <INDENT> checker = SpellChecker(lang, filters=[EmailFilter, URLFilter, WikiWordFilter]) <NEW_LINE> self._enchant_checkers[lang] = checker <NEW_LINE> <DEDENT> <DEDENT> def spell_check(self, text, fmt, lang='en_US', pkgname='', ignored_words=None): <NEW_LINE> <INDENT> warned = set() <NEW_LINE> suggestions = {} <NEW_LINE> if lang not in self._enchant_checkers: <NEW_LINE> <INDENT> self._init_checker(lang) <NEW_LINE> <DEDENT> if lang not in self._enchant_checkers: <NEW_LINE> <INDENT> return suggestions <NEW_LINE> <DEDENT> checker = self._enchant_checkers[lang] <NEW_LINE> if checker: <NEW_LINE> <INDENT> checker.set_text(re.sub(r'\s+', ' ', text)) <NEW_LINE> uppername = pkgname.upper() <NEW_LINE> upperparts = uppername.split('-') <NEW_LINE> if lang.startswith('en'): <NEW_LINE> <INDENT> ups = [x + "'S" for x in upperparts] <NEW_LINE> upperparts.extend(ups) <NEW_LINE> <DEDENT> if ignored_words: <NEW_LINE> <INDENT> ignored_words = [x.upper() for x in ignored_words] <NEW_LINE> <DEDENT> for err in checker: <NEW_LINE> <INDENT> if err.word in warned: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> warned.add(err.word) <NEW_LINE> if err.word[0].isupper() and not self.sentence_break_regex.search(checker.leading_context(3)): <NEW_LINE> <INDENT> continue 
<NEW_LINE> <DEDENT> upperword = err.word.upper() <NEW_LINE> if err.word == upperword: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if ignored_words and upperword in ignored_words: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if upperword in uppername or upperword in upperparts: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if checker.leading_context(1).isdigit() or checker.trailing_context(1).isdigit(): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> sug = ', '.join(checker.suggest()[:3]) <NEW_LINE> if sug: <NEW_LINE> <INDENT> sug = f'-> {sug}' <NEW_LINE> <DEDENT> suggestions[err.word] = fmt.format(lang) + f' {err.word} {sug}' <NEW_LINE> <DEDENT> return suggestions
The object containing current state of spellchecking used within rpmlint
6259906cfff4ab517ebcf03a
class Fixture(object): <NEW_LINE> <INDENT> __metaclass__ = registry.make_registry_metaclass(_FIXTURES) <NEW_LINE> REGISTERED_NAME = "Fixture" <NEW_LINE> def __init__(self, logger, job_num): <NEW_LINE> <INDENT> if not isinstance(logger, logging.Logger): <NEW_LINE> <INDENT> raise TypeError("logger must be a Logger instance") <NEW_LINE> <DEDENT> if not isinstance(job_num, int): <NEW_LINE> <INDENT> raise TypeError("job_num must be an integer") <NEW_LINE> <DEDENT> elif job_num < 0: <NEW_LINE> <INDENT> raise ValueError("job_num must be a nonnegative integer") <NEW_LINE> <DEDENT> self.logger = logger <NEW_LINE> self.job_num = job_num <NEW_LINE> <DEDENT> def setup(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def await_ready(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def teardown(self, finished=False): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self._do_teardown() <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> if finished: <NEW_LINE> <INDENT> for handler in self.logger.handlers: <NEW_LINE> <INDENT> logging.flush.close_later(handler) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def _do_teardown(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def is_running(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def get_internal_connection_string(self): <NEW_LINE> <INDENT> raise NotImplementedError("get_connection_string must be implemented by Fixture subclasses") <NEW_LINE> <DEDENT> def get_driver_connection_url(self): <NEW_LINE> <INDENT> raise NotImplementedError( "get_driver_connection_url must be implemented by Fixture subclasses") <NEW_LINE> <DEDENT> def mongo_client(self, read_preference=pymongo.ReadPreference.PRIMARY, timeout_millis=30000): <NEW_LINE> <INDENT> kwargs = {"connectTimeoutMS": timeout_millis} <NEW_LINE> if pymongo.version_tuple[0] >= 3: <NEW_LINE> <INDENT> kwargs["serverSelectionTimeoutMS"] = timeout_millis <NEW_LINE> kwargs["connect"] = True <NEW_LINE> <DEDENT> return 
pymongo.MongoClient(host=self.get_driver_connection_url(), read_preference=read_preference, **kwargs) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "%s (Job #%d)" % (self.__class__.__name__, self.job_num) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "%r(%r, %r)" % (self.__class__.__name__, self.logger, self.job_num)
Base class for all fixtures.
6259906c76e4537e8c3f0da2
class StatsManagerTest(TestCase): <NEW_LINE> <INDENT> def test_parse_stats(self): <NEW_LINE> <INDENT> pm_file_example = pkg_resources.resource_string(__name__, 'pm_file_example.xml') <NEW_LINE> root = ElementTree.fromstring(pm_file_example) <NEW_LINE> mgr = StatsManager() <NEW_LINE> mgr.parse_pm_xml(root) <NEW_LINE> rrc_estab_attempts = metrics.STAT_RRC_ESTAB_ATT.collect() <NEW_LINE> self.assertEqual(rrc_estab_attempts[0].samples[0][2], 123) <NEW_LINE> rrc_estab_successes = metrics.STAT_RRC_ESTAB_SUCC.collect() <NEW_LINE> self.assertEqual(rrc_estab_successes[0].samples[0][2], 99) <NEW_LINE> rrc_reestab_att_reconf_fail = metrics.STAT_RRC_REESTAB_ATT_RECONF_FAIL.collect() <NEW_LINE> self.assertEqual(rrc_reestab_att_reconf_fail[0].samples[0][2], 654) <NEW_LINE> erab_rel_req_radio_conn_lost = metrics.STAT_ERAB_REL_REQ_RADIO_CONN_LOST.collect() <NEW_LINE> self.assertEqual(erab_rel_req_radio_conn_lost[0].samples[0][2], 65537)
Tests for eNodeB statistics manager
6259906ca17c0f6771d5d7b8
@dataclass <NEW_LINE> class FontFamily: <NEW_LINE> <INDENT> provider: str <NEW_LINE> name: str
Font family description for the settings
6259906c091ae35668706453
class TestingConfig(Config): <NEW_LINE> <INDENT> TESTING = True <NEW_LINE> SQLALCHEMY_DATABASE_URI = os.environ.get('TEST_DATABASE_URL') or 'sqlite:///' + os.path.join(basedir, 'data-test.sqlite')
Test config
6259906c097d151d1a2c288f
class LookupFormatter: <NEW_LINE> <INDENT> def __init__(self, model): <NEW_LINE> <INDENT> self._model = model <NEW_LINE> self._modelInstance = self._model() <NEW_LINE> <DEDENT> def setDisplay(self, itemid): <NEW_LINE> <INDENT> md = self._modelInstance.queryObject().filter(self._model.id == itemid).first() <NEW_LINE> if md: <NEW_LINE> <INDENT> return md.name <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None
Formatter for displaying user-friendly information about a checkup model
6259906c3346ee7daa33826e
class EnumValueChecker(object): <NEW_LINE> <INDENT> def __init__(self, enum_type): <NEW_LINE> <INDENT> self._enum_type = enum_type <NEW_LINE> <DEDENT> def CheckValue(self, proposed_value): <NEW_LINE> <INDENT> if not isinstance(proposed_value, int): <NEW_LINE> <INDENT> message = ('%.1024r has type %s, but expected one of: %s' % (proposed_value, type(proposed_value), (int, int))) <NEW_LINE> raise TypeError(message) <NEW_LINE> <DEDENT> if proposed_value not in self._enum_type.values_by_number: <NEW_LINE> <INDENT> raise ValueError('Unknown enum value: %d' % proposed_value) <NEW_LINE> <DEDENT> return proposed_value
Checker used for enum fields. Performs type-check and range check.
6259906c4428ac0f6e659d53
class DepartureAirport(RequestConstructor): <NEW_LINE> <INDENT> def __init__(self, airport_code: str, city: str, terminal: str, gate: str): <NEW_LINE> <INDENT> self.syntax = { 'airport_code': airport_code, 'city': city, 'terminal': terminal, 'gate': gate }
Departure airport. Args: airport_code: Airport code of the departure airport. city: Departure city of the flight. terminal: Terminal of the departing flight. gate: Gate for the departing flight.
6259906c460517430c432c66
class CreatePhoneTokenView(GenericAPIView): <NEW_LINE> <INDENT> serializer_class = CreatePhoneTokenSerializer <NEW_LINE> permission_classes = (AllowAny,) <NEW_LINE> def post(self, request): <NEW_LINE> <INDENT> data = self.serializer_class(data=request.data) <NEW_LINE> data.is_valid(raise_exception=True) <NEW_LINE> phone = data.validated_data['phone'] <NEW_LINE> phone_token = PhoneToken.objects.create(phone=phone) <NEW_LINE> msg = 'Hello from Tech Heroes! Verify your phone with this token: {0}'.format(phone_token.token) <NEW_LINE> try: <NEW_LINE> <INDENT> send_sms(phone, msg) <NEW_LINE> <DEDENT> except SendSMSError as e: <NEW_LINE> <INDENT> error = {'detail': str(e)} <NEW_LINE> return Response(error, status=status.HTTP_424_FAILED_DEPENDENCY) <NEW_LINE> <DEDENT> return Response({}, status=status.HTTP_201_CREATED)
POST: Create a phone_token for a particular phone number.
6259906c99cbb53fe6832708