code: stringlengths 4–4.48k
docstring: stringlengths 1–6.45k
_id: stringlengths 24–24
class Song(Media): <NEW_LINE> <INDENT> def __init__(self, title="No Title", author="No Author", release_year="No Release Year", url="No URL", album="No Album", genre="No Genre", track_length=0, json=None): <NEW_LINE> <INDENT> super().__init__(title, author, release_year, url, json) <NEW_LINE> if (json is None): <NEW_LINE> <INDENT> self.album = album <NEW_LINE> self.genre = genre <NEW_LINE> self.track_length = track_length <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.title = json["trackName"] <NEW_LINE> self.album = json["collectionName"] <NEW_LINE> self.genre = json["primaryGenreName"] <NEW_LINE> self.track_length = json["trackTimeMillis"] <NEW_LINE> <DEDENT> <DEDENT> def info(self): <NEW_LINE> <INDENT> return f"{super().info()} [{self.genre}]" <NEW_LINE> <DEDENT> def length(self): <NEW_LINE> <INDENT> rounded_length = round(self.track_length/1000) <NEW_LINE> return rounded_length
Song media item. Built either from explicit attributes or from an iTunes-style JSON record (trackName, collectionName, primaryGenreName, trackTimeMillis); length() returns the track length rounded to whole seconds.
625990326fece00bbaccca88
@dataclass <NEW_LINE> class RouteInfo: <NEW_LINE> <INDENT> origin: int <NEW_LINE> destination: int <NEW_LINE> o_name: str <NEW_LINE> d_name: str <NEW_LINE> name: str <NEW_LINE> nodes: List
Basic OD information for a route.
62599032a4f1c619b294f6cf
class Script: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def sign(privkey, data): <NEW_LINE> <INDENT> return rsa.sign(data.encode(), privkey, 'SHA-256') <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def verify(data, signature, pubkey): <NEW_LINE> <INDENT> if data is None or signature is None or pubkey is None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> rsa.verify(data.encode(), signature, pubkey) <NEW_LINE> <DEDENT> except rsa.pkcs1.VerificationError: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def sha160(data): <NEW_LINE> <INDENT> sha = hashlib.sha256(data.encode('utf-8')) <NEW_LINE> hash_256_value = sha.hexdigest() <NEW_LINE> obj = hashlib.new('ripemd160', hash_256_value.encode('utf-8')) <NEW_LINE> ripemd_160_value = obj.hexdigest() <NEW_LINE> return ripemd_160_value <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def check_tx_script(data, script_sig, script_pubkey): <NEW_LINE> <INDENT> stack = Stack() <NEW_LINE> for element in script_sig: <NEW_LINE> <INDENT> stack.push(element) <NEW_LINE> <DEDENT> for element in script_pubkey: <NEW_LINE> <INDENT> if element == OP_DUP: <NEW_LINE> <INDENT> top = stack.peek() <NEW_LINE> stack.push(top) <NEW_LINE> <DEDENT> elif element == OP_HASH160: <NEW_LINE> <INDENT> top = str(stack.pop()) <NEW_LINE> stack.push(Script.sha160(top)) <NEW_LINE> <DEDENT> elif element == OP_EQUALVERIFY: <NEW_LINE> <INDENT> top_1 = stack.pop() <NEW_LINE> top_2 = stack.pop() <NEW_LINE> if top_1 != top_2: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> elif element == OP_CHECKSIG: <NEW_LINE> <INDENT> pubkey = stack.pop() <NEW_LINE> signature = stack.pop() <NEW_LINE> result = Script.verify(data, signature, pubkey) <NEW_LINE> stack.push(result) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> stack.push(element) <NEW_LINE> <DEDENT> <DEDENT> if stack.size() == 1 and stack.peek() is True: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False
Script flow: duplicate the top stack element -> hash (encrypt) -> verify the signature
625990323eb6a72ae038b73f
class Initializer: <NEW_LINE> <INDENT> def __init__( self ): <NEW_LINE> <INDENT> self.SOURCE_DIR_PATH = os.environ['ANXEOD__SOURCE_DIR_PATH'] <NEW_LINE> self.DESTINATION_PATH = os.environ['ANXEOD__TRACKER_A_PATH'] <NEW_LINE> self.filepath_tracker = [] <NEW_LINE> self.start = datetime.datetime.now() <NEW_LINE> self.files: list = glob.glob( f'{self.SOURCE_DIR_PATH}/*.dat' ) <NEW_LINE> <DEDENT> def initialize_tracker( self ): <NEW_LINE> <INDENT> log.debug( f'len(files), `{len(self.files)}`' ) <NEW_LINE> for path in self.files: <NEW_LINE> <INDENT> self.build_initial_tracker( path ) <NEW_LINE> <DEDENT> sorted_filepath_tracker = self.build_sorted_tracker() <NEW_LINE> time_taken = str( datetime.datetime.now() - self.start ) <NEW_LINE> log.debug( f'time_taken, `{time_taken}`' ) <NEW_LINE> with open( self.DESTINATION_PATH, 'w' ) as f: <NEW_LINE> <INDENT> jsn: str = json.dumps( sorted_filepath_tracker, sort_keys=True, indent=2 ) <NEW_LINE> f.write( jsn ) <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> def build_initial_tracker( self, path: str ) -> None: <NEW_LINE> <INDENT> file_timestamp: float = os.path.getmtime( path ) <NEW_LINE> timestamp: datetime.datetime = datetime.datetime.fromtimestamp( file_timestamp ) <NEW_LINE> info: dict = { 'path': path, 'timestamp': timestamp, 'updated': None } <NEW_LINE> self.filepath_tracker.append( info ) <NEW_LINE> return <NEW_LINE> <DEDENT> def build_sorted_tracker( self ) -> list: <NEW_LINE> <INDENT> sorted_filepath_tracker: list = sorted( self.filepath_tracker, key=itemgetter('timestamp') ) <NEW_LINE> for entry in sorted_filepath_tracker: <NEW_LINE> <INDENT> entry['timestamp'] = str( entry['timestamp'] ) <NEW_LINE> <DEDENT> log.debug( f'len(sorted_filepath_tracker), `{len(sorted_filepath_tracker)}`' ) <NEW_LINE> return sorted_filepath_tracker
Creates initial tracker.
6259903215baa72349463072
class UserProfile(models.Model): <NEW_LINE> <INDENT> user = models.ForeignKey(User) <NEW_LINE> feeds = models.ManyToManyField(to=Feed, through='profiles.Subscription') <NEW_LINE> entries = models.ManyToManyField(to=Entry, through='profiles.UserEntryDetail') <NEW_LINE> next_slug = RandomSlugField(slug_length=10) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return u"{}'s profile".format(self.user.username) <NEW_LINE> <DEDENT> def subscribe(self, feed): <NEW_LINE> <INDENT> Subscription.objects.create(profile=self, feed=feed) <NEW_LINE> <DEDENT> def unsubscribe(self, feed): <NEW_LINE> <INDENT> Subscription.objects.get(profile=self, feed=feed).delete() <NEW_LINE> self._get_entries(feed).delete() <NEW_LINE> <DEDENT> def mark_read(self, feed): <NEW_LINE> <INDENT> entries = self._get_entries(feed) <NEW_LINE> for entry in entries: <NEW_LINE> <INDENT> entry.read = True <NEW_LINE> entry.save() <NEW_LINE> <DEDENT> <DEDENT> def mark_unread(self, feed): <NEW_LINE> <INDENT> entries = self._get_entries(feed) <NEW_LINE> for entry in entries: <NEW_LINE> <INDENT> entry.read = False <NEW_LINE> entry.save() <NEW_LINE> <DEDENT> <DEDENT> def unread_entries(self, feed): <NEW_LINE> <INDENT> return self._get_entries(feed).filter(read=False).count() <NEW_LINE> <DEDENT> def _get_entries(self, feed): <NEW_LINE> <INDENT> return UserEntryDetail.objects.filter(profile=self, entry__feed=feed)
A user profile.
6259903291af0d3eaad3af06
class OsuConverter(commands.IDConverter): <NEW_LINE> <INDENT> async def convert(self, ctx, argument): <NEW_LINE> <INDENT> id_ = self._get_id_match(argument) or re.match(r'<@!?([0-9]+)>$', argument) <NEW_LINE> if id_: <NEW_LINE> <INDENT> result = ctx.guild.get_member(int(id_.group(1))) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result = argument <NEW_LINE> <DEDENT> if not result: <NEW_LINE> <INDENT> raise commands.BadArgument(f'Could not find a guild matching <{argument}>', argument) <NEW_LINE> <DEDENT> return result
Converts an argument to a guild Member, falling back to the raw argument.
6259903226068e7796d4da23
class STrustGetter(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(STrustGetter, self).__init__() <NEW_LINE> self.config = ConfigX() <NEW_LINE> self.user = {} <NEW_LINE> self.relations = self.get_relations() <NEW_LINE> self.followees = defaultdict(dict) <NEW_LINE> self.followers = {} <NEW_LINE> self.matrix_User = {} <NEW_LINE> self.matrix_Item = {} <NEW_LINE> self.generate_data_set() <NEW_LINE> <DEDENT> def generate_data_set(self): <NEW_LINE> <INDENT> triple = [] <NEW_LINE> for line in self.relations: <NEW_LINE> <INDENT> userId1, userId2, weight = line <NEW_LINE> if not userId1 in self.followees: <NEW_LINE> <INDENT> self.followees[userId1] = {} <NEW_LINE> <DEDENT> self.followees[userId1][userId2] = weight <NEW_LINE> if not userId2 in self.followers: <NEW_LINE> <INDENT> self.followers[userId2] = {} <NEW_LINE> <DEDENT> self.followers[userId2][userId1] = weight <NEW_LINE> if not userId1 in self.user: <NEW_LINE> <INDENT> userid1 = self.user[userId1] = len(self.user) <NEW_LINE> <DEDENT> if not userId2 in self.user: <NEW_LINE> <INDENT> userid2 = self.user[userId2] = len(self.user) <NEW_LINE> <DEDENT> if not userid1 in self.matrix_User: <NEW_LINE> <INDENT> self.matrix_User[userid1] = {} <NEW_LINE> <DEDENT> if not userid2 in self.matrix_User: <NEW_LINE> <INDENT> self.matrix_Item[userid2] = {} <NEW_LINE> <DEDENT> self.matrix_User[userid1][userid2] = weight <NEW_LINE> self.matrix_Item[userid2][userid1] = weight <NEW_LINE> <DEDENT> <DEDENT> def get_relations(self): <NEW_LINE> <INDENT> df_save = pd.read_csv('E:\\testdata\\Epin2\\Epin-final.csv', header=None) <NEW_LINE> df_save.columns = ['no','trustor','trustee','trust lable'] <NEW_LINE> save1=df_save.ix[:,1:] <NEW_LINE> save=np.array(save1) <NEW_LINE> save=save.tolist() <NEW_LINE> for i in range(len(save)): <NEW_LINE> <INDENT> if save[i][2]==1: <NEW_LINE> <INDENT> u_from, u_to, t = int(save[i][0]), int(save[i][1]), save[i][2] <NEW_LINE> yield (int(u_from), int(u_to), float(t)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def get_followees(self, u): <NEW_LINE> <INDENT> if u in self.followees: <NEW_LINE> <INDENT> return self.followees[u] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> <DEDENT> def get_followers(self, u): <NEW_LINE> <INDENT> if u in self.followers: <NEW_LINE> <INDENT> return self.followers[u] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> <DEDENT> def weight(self, u, k): <NEW_LINE> <INDENT> if u in self.followees and k in self.followees[u]: <NEW_LINE> <INDENT> return self.followees[u][k] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 0
Docstring for TrustGetter: reads trust data and saves the global parameters.
62599032711fe17d825e1507
class CiscoNXOS(IOSLikeDevice): <NEW_LINE> <INDENT> _disable_paging_command = "terminal length 0" <NEW_LINE> @staticmethod <NEW_LINE> def _normalize_linefeeds(a_string): <NEW_LINE> <INDENT> newline = re.compile(r"(\r\r\n|\r\n)") <NEW_LINE> return newline.sub("\n", a_string).replace("\r", "")
Class for working with Cisco Nexus/NX-OS
6259903250485f2cf55dc056
class returned_values: <NEW_LINE> <INDENT> value=None <NEW_LINE> complement=None <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> pass
This class defines the structure of returned_values: a returned value has the used value and its complement, the free value.
625990328c3a8732951f7632
@dataclass <NEW_LINE> class DataCollatorForLanguageModeling: <NEW_LINE> <INDENT> tokenizer: PreTrainedTokenizer <NEW_LINE> mlm: bool = True <NEW_LINE> mlm_probability: float = 0.15 <NEW_LINE> def __call__(self, examples: List[Union[torch.Tensor, Dict[str, torch.Tensor]]]) -> Dict[str, torch.Tensor]: <NEW_LINE> <INDENT> if isinstance(examples[0], (dict, BatchEncoding)): <NEW_LINE> <INDENT> examples = [e["input_ids"] for e in examples] <NEW_LINE> <DEDENT> batch = self._tensorize_batch(examples) <NEW_LINE> if self.mlm: <NEW_LINE> <INDENT> inputs, labels = self.mask_tokens(batch) <NEW_LINE> return {"input_ids": inputs, "labels": labels} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> labels = batch.clone().detach() <NEW_LINE> if self.tokenizer.pad_token_id is not None: <NEW_LINE> <INDENT> labels[labels == self.tokenizer.pad_token_id] = -100 <NEW_LINE> <DEDENT> return {"input_ids": batch, "labels": labels} <NEW_LINE> <DEDENT> <DEDENT> def _tensorize_batch(self, examples: List[torch.Tensor]) -> torch.Tensor: <NEW_LINE> <INDENT> length_of_first = examples[0].size(0) <NEW_LINE> are_tensors_same_length = all(x.size(0) == length_of_first for x in examples) <NEW_LINE> if are_tensors_same_length: <NEW_LINE> <INDENT> return torch.stack(examples, dim=0) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if self.tokenizer._pad_token is None: <NEW_LINE> <INDENT> raise ValueError( "You are attempting to pad samples but the tokenizer you are using" f" ({self.tokenizer.__class__.__name__}) does not have one." ) <NEW_LINE> <DEDENT> return pad_sequence(examples, batch_first=True, padding_value=self.tokenizer.pad_token_id) <NEW_LINE> <DEDENT> <DEDENT> def mask_tokens(self, inputs: torch.Tensor) -> Tuple[torch.Tensor, torch.Tensor]: <NEW_LINE> <INDENT> if self.tokenizer.mask_token is None: <NEW_LINE> <INDENT> raise ValueError( "This tokenizer does not have a mask token which is necessary for masked language modeling. Remove the --mlm flag if you want to use this tokenizer." ) <NEW_LINE> <DEDENT> labels = inputs.clone() <NEW_LINE> probability_matrix = torch.full(labels.shape, self.mlm_probability) <NEW_LINE> special_tokens_mask = [ self.tokenizer.get_special_tokens_mask(val, already_has_special_tokens=True) for val in labels.tolist() ] <NEW_LINE> probability_matrix.masked_fill_(torch.tensor(special_tokens_mask, dtype=torch.bool), value=0.0) <NEW_LINE> if self.tokenizer._pad_token is not None: <NEW_LINE> <INDENT> padding_mask = labels.eq(self.tokenizer.pad_token_id) <NEW_LINE> probability_matrix.masked_fill_(padding_mask, value=0.0) <NEW_LINE> <DEDENT> masked_indices = torch.bernoulli(probability_matrix).bool() <NEW_LINE> labels[~masked_indices] = -100 <NEW_LINE> indices_replaced = torch.bernoulli(torch.full(labels.shape, 0.8)).bool() & masked_indices <NEW_LINE> inputs[indices_replaced] = self.tokenizer.convert_tokens_to_ids(self.tokenizer.mask_token) <NEW_LINE> indices_random = torch.bernoulli(torch.full(labels.shape, 0.5)).bool() & masked_indices & ~indices_replaced <NEW_LINE> random_words = torch.randint(len(self.tokenizer), labels.shape, dtype=torch.long) <NEW_LINE> inputs[indices_random] = random_words[indices_random] <NEW_LINE> return inputs, labels
Data collator used for language modeling. - collates batches of tensors, honoring their tokenizer's pad_token - preprocesses batches for masked language modeling
6259903266673b3332c314cc
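Note on the masking step in the collator above: mask_tokens() applies the BERT-style 80/10/10 rule. Below is a minimal standalone sketch of that rule in plain PyTorch, my own illustration with made-up mask/pad token ids, not the transformers implementation itself:

import torch

def mask_tokens(inputs, mask_token_id=103, pad_token_id=0,
                vocab_size=30522, mlm_probability=0.15):
    labels = inputs.clone()
    prob = torch.full(labels.shape, mlm_probability)
    prob.masked_fill_(labels == pad_token_id, 0.0)     # never mask padding
    masked = torch.bernoulli(prob).bool()
    labels[~masked] = -100                             # ignore unmasked positions in the loss
    # 80% of the masked positions become the mask token
    replaced = torch.bernoulli(torch.full(labels.shape, 0.8)).bool() & masked
    inputs[replaced] = mask_token_id
    # 10% become a random token (half of the remaining 20%)
    randomized = torch.bernoulli(torch.full(labels.shape, 0.5)).bool() & masked & ~replaced
    random_words = torch.randint(vocab_size, labels.shape, dtype=torch.long)
    inputs[randomized] = random_words[randomized]
    # the final 10% are left unchanged
    return inputs, labels

batch = torch.randint(5, 1000, (2, 8))        # fake token ids
inputs, labels = mask_tokens(batch.clone())   # clone: masking edits the tensor in place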
class PrintHelp(Exception): <NEW_LINE> <INDENT> pass
raise this to print a message and the argparse help
625990321d351010ab8f4bf3
class CurrentControlledVoltageSource(VoltageSource): <NEW_LINE> <INDENT> def __init__(self, name, element, evaluated_paramsd, parent): <NEW_LINE> <INDENT> r_expresion = element.paramsl[-1] <NEW_LINE> r_value = scs_parser.evaluate_param('_r', {'_r': r_expresion}, evaluated_paramsd, parent) <NEW_LINE> self.names = [name, element.paramsl[-2]] <NEW_LINE> self.nets = element.paramsl[:-2] <NEW_LINE> self.values = [sympy.sympify(r_value,sympy.abc._clash)]
Object representing an instance of a current-controlled voltage source in a circuit.
625990328e05c05ec3f6f6c8
class Service(object): <NEW_LINE> <INDENT> name = None <NEW_LINE> requests = None <NEW_LINE> waiting = None <NEW_LINE> def __init__(self, name): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.requests = [] <NEW_LINE> self.waiting = [] <NEW_LINE> self.idle = []
a single Service
62599032ac7a0e7691f735c2
class Journal(Service): <NEW_LINE> <INDENT> writing = defer.succeed(None) <NEW_LINE> def write(self, occurence=None): <NEW_LINE> <INDENT> if occurence and occurence.name == 'signal': <NEW_LINE> <INDENT> if occurence.signum != signal.SIGTERM: return <NEW_LINE> log('Attempting to save journal before shutdown.') <NEW_LINE> <DEDENT> if self.writing.called: <NEW_LINE> <INDENT> self.writing = threads.deferToThread(self.blocking_journal_write) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> log('Journal is still being written from previous iteration,' + ' will hold off until next iteration') <NEW_LINE> <DEDENT> <DEDENT> def _journal_failure(self, occurrence): <NEW_LINE> <INDENT> badFile = occurrence.journal <NEW_LINE> log('File %s is corrupt' % (badFile,)) <NEW_LINE> os.rename(badFile, badFile+str('_corrupt')) <NEW_LINE> log('renaming the file to preserve system history') <NEW_LINE> <DEDENT> def blocking_journal_write(self): <NEW_LINE> <INDENT> if not self._task.running: return <NEW_LINE> now = int( time.time() ) <NEW_LINE> snapshot = '%d.pickle' % now <NEW_LINE> path = os.path.join(config.JOURNAL_DIR, snapshot) <NEW_LINE> outfile = open(path, 'wb') <NEW_LINE> c = 0 <NEW_LINE> for obj in gc.get_objects(): <NEW_LINE> <INDENT> if isinstance(obj, Entity) and obj.serializable and obj.__class__.isValid(obj): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> data = obj.serialize() <NEW_LINE> outfile.write(data) <NEW_LINE> c += 1 <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> failure = err('Exception Serializing an Object') <NEW_LINE> Event('journal-error').fire(failure=failure, journal=path) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> outfile.close() <NEW_LINE> plural = 's' <NEW_LINE> if c == 1: plural = '' <NEW_LINE> log('stored %d object%s' % (c,plural)) <NEW_LINE> old = sorted( map(int, list_snapshots()) )[:-config.JOURNAL_RETENTION] <NEW_LINE> for timestamp in old: <NEW_LINE> <INDENT> path = os.path.join(config.JOURNAL_DIR, str(timestamp) + SUFFIX) <NEW_LINE> os.unlink(path) <NEW_LINE> <DEDENT> <DEDENT> def startService(self): <NEW_LINE> <INDENT> self._task = task.LoopingCall(self.write) <NEW_LINE> reactor.callLater( config.JOURNAL_FREQUENCY, self._task.start, config.JOURNAL_FREQUENCY ) <NEW_LINE> Event('journal-error').subscribe(self._journal_failure) <NEW_LINE> Event('instance-started').subscribe(self.write) <NEW_LINE> Event('signal').subscribe(self.write) <NEW_LINE> Service.startService(self) <NEW_LINE> <DEDENT> def stopService(self): <NEW_LINE> <INDENT> Event('instance-started').unsubscribe(self.write) <NEW_LINE> Event('signal').unsubscribe(self.write) <NEW_LINE> Event('journal-error').unsubscribe(self._journal_failure) <NEW_LINE> if self._task.running: self._task.stop() <NEW_LINE> if not self.writing.called: <NEW_LINE> <INDENT> self.writing.addBoth(lambda x: Service.stopService(self) and x or x) <NEW_LINE> return self.writing <NEW_LINE> <DEDENT> Service.stopService(self)
This Service is responsible for maintaining DroneD's notion of environmental state.
625990329b70327d1c57fe5f
class SnapshotRelayTest(TestCase): <NEW_LINE> <INDENT> def test_should_put_vehicle_snapshot_on_the_sim_queue_when_translation_event_is_received(self): <NEW_LINE> <INDENT> snapshots = Queue() <NEW_LINE> _ = SnapshotRelay(snapshots) <NEW_LINE> mock_lane = mock() <NEW_LINE> mock_lane.id = 10 <NEW_LINE> vehicle = Vehicle(0, mock_lane) <NEW_LINE> E_TRANSLATE.send(sender=vehicle, timestamp=100) <NEW_LINE> expected_snapshot = {'acc': 0.0, 'position': 0.0, 'lane': mock_lane.id, 'id': 0, 'velocity': 0.0, 'time': 100} <NEW_LINE> self.assertEqual(snapshots.get(), expected_snapshot)
A note about this line: relay = SnapshotRelay(snapshots). The SnapshotRelay object will not put anything on the queue if, when it is instantiated, the instance is not assigned to a variable. This is very strange: what does Python do when you assign a new instance of a class to a variable that it doesn't do when no variable is created for the instance?
625990328c3a8732951f7633
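A likely answer to the question in the note above (this sketch is my own, not part of the test file): when the new instance is never bound to a name, CPython's reference counting reclaims it as soon as the expression finishes, so a weakly-held signal subscription created in __init__ dies with it. Minimal demonstration:

import weakref

class Relay:
    def __init__(self, sink):
        self.sink = sink          # imagine __init__ also subscribes self to a signal

probe = weakref.ref(Relay([]))    # instance is never bound to a name
print(probe() is None)            # True on CPython: collected immediately

kept = Relay([])                  # binding a name keeps the instance alive
probe = weakref.ref(kept)
print(probe() is None)            # False: a weak subscription would survive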
class Post(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'posts' <NEW_LINE> id = db.Column(db.Integer, primary_key = True) <NEW_LINE> body = db.Column(db.Text) <NEW_LINE> timestamp = db.Column(db.DateTime, index = True, default = datetime.utcnow) <NEW_LINE> author_id = db.Column(db.Integer, db.ForeignKey('users.id')) <NEW_LINE> body_html = db.Column(db.Text) <NEW_LINE> comments = db.relationship('Comment', backref='post', lazy='dynamic') <NEW_LINE> @staticmethod <NEW_LINE> def generate_fake(count = 100): <NEW_LINE> <INDENT> from random import seed, randint <NEW_LINE> import forgery_py <NEW_LINE> seed() <NEW_LINE> user_count = User.query.count() <NEW_LINE> for i in range(count): <NEW_LINE> <INDENT> u = User.query.offset(randint(0, user_count - 1)).first() <NEW_LINE> p = Post(body = forgery_py.lorem_ipsum.sentences(randint(1, 3)), timestamp = forgery_py.date.date(True), author = u) <NEW_LINE> db.session.add(p) <NEW_LINE> db.session.commit() <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def on_changed_body(target, value, oldvalue, initiator): <NEW_LINE> <INDENT> allowed_tags = ['a', 'abbr', 'acronym', 'b', 'blockquote', 'code', 'em', 'i', 'li', 'ol', 'pre', 'strong', 'ul', 'h1', 'h2', 'h3', 'p'] <NEW_LINE> target.body_html = bleach.linkify(bleach.clean( markdown(value, output_format = 'html'), tags = allowed_tags, strip = True))
Defines the model for users' column posts.
6259903221bff66bcd723d40
class SolarEdgeStorageLevelSensor(SolarEdgeSensor): <NEW_LINE> <INDENT> _attr_device_class = DEVICE_CLASS_BATTERY <NEW_LINE> def __init__( self, platform_name: str, sensor_key: str, data_service: SolarEdgeDataService ) -> None: <NEW_LINE> <INDENT> super().__init__(platform_name, sensor_key, data_service) <NEW_LINE> self._json_key = SENSOR_TYPES[self.sensor_key][0] <NEW_LINE> <DEDENT> @property <NEW_LINE> def state(self) -> str | None: <NEW_LINE> <INDENT> attr = self.data_service.attributes.get(self._json_key) <NEW_LINE> if attr and "soc" in attr: <NEW_LINE> <INDENT> return attr["soc"] <NEW_LINE> <DEDENT> return None
Representation of a SolarEdge Monitoring API storage level sensor.
6259903266673b3332c314ce
class MetadataRDBMSFactory(AbstractMetadataFactory): <NEW_LINE> <INDENT> def __init__(self, entity, name, **kwargs): <NEW_LINE> <INDENT> super(MetadataRDBMSFactory, self).__init__(entity, name, kwargs) <NEW_LINE> <DEDENT> def create_admin(self): <NEW_LINE> <INDENT> admin = MetadataAdmin(self._entity, self._name, **self._addl_params) <NEW_LINE> self._metadata.add(admin) <NEW_LINE> <DEDENT> def create_desc(self): <NEW_LINE> <INDENT> desc = MetadataDesc(self._entity, self._name, **self._addl_params) <NEW_LINE> self._metadata.add(desc) <NEW_LINE> <DEDENT> def create_tech(self): <NEW_LINE> <INDENT> tech = MetadataTechRDBMS(self._entity, self._name, **self._addl_params) <NEW_LINE> self._metadata.add(tech) <NEW_LINE> <DEDENT> def create_process(self): <NEW_LINE> <INDENT> process = MetadataProcess(self._entity, self._name, **self._addl_params) <NEW_LINE> self._metadata.add(process)
Builds a Metadata object for RDBMS based DataSource and DataStore objects.
625990326fece00bbaccca8c
@ModelComponentFactory.register("Uncertainty set in a robust problem") <NEW_LINE> class UncSet(SimpleBlock): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> _rule = kwargs.pop('rule', None) <NEW_LINE> _fixed = kwargs.pop('fixed', None) <NEW_LINE> _param = kwargs.pop('param', None) <NEW_LINE> kwargs.setdefault('ctype', UncSet) <NEW_LINE> SimpleBlock.__init__(self, *args, **kwargs) <NEW_LINE> self._rule = _rule <NEW_LINE> if isinstance(_fixed, Component): <NEW_LINE> <INDENT> self._fixed = [_fixed] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._fixed = _fixed <NEW_LINE> <DEDENT> if isinstance(_param, Component): <NEW_LINE> <INDENT> self._param = [_param] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._param = _param <NEW_LINE> <DEDENT> self._lib = False <NEW_LINE> <DEDENT> def is_empty(self): <NEW_LINE> <INDENT> if self._lib: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> for c in self.component_data_objects(Constraint, active=True): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> def is_lib(self): <NEW_LINE> <INDENT> return self._lib <NEW_LINE> <DEDENT> def get_uncertain_param(self): <NEW_LINE> <INDENT> param = None <NEW_LINE> for p in self.component_objects(UncParam, active=True): <NEW_LINE> <INDENT> if param is None: <NEW_LINE> <INDENT> param = p <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> assert param is p <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def generate_cons_from_lib(self, param): <NEW_LINE> <INDENT> name = self.__class__.__name__ <NEW_LINE> raise NotImplementedError( "Looks like the cutting plane solver is not applicable to " "library set '{}'. Try 'romodel.reformulate'".format(name) )
This model component defines an uncertainty set in a robust optimization problem.
625990323eb6a72ae038b743
class City(BaseModel): <NEW_LINE> <INDENT> name = "" <NEW_LINE> state_id = ""
Class City that inherits from BaseModel.
62599032d4950a0f3b1116ac
class UserDetailsSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = User <NEW_LINE> fields = ('pk', 'username', 'email', 'name', 'nickname', 'anonymous') <NEW_LINE> read_only_fields = ('username', 'email')
User model w/o password
6259903226068e7796d4da27
class NameInferenceError(InferenceError): <NEW_LINE> <INDENT> name = None <NEW_LINE> scope = None <NEW_LINE> def __init__(self, message="{name!r} not found in {scope!r}.", **kws): <NEW_LINE> <INDENT> super().__init__(message, **kws)
Raised when a name lookup fails, corresponds to NameError. Standard attributes: name: The name for which lookup failed, as a string. scope: The node representing the scope in which the lookup occurred. context: InferenceContext object.
625990320a366e3fb87ddac3
class UserSimpleForm(ModelForm): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = User <NEW_LINE> fields = ('first_name', 'last_name')
Form for registration page
625990325e10d32532ce4171
class RegisterView(View): <NEW_LINE> <INDENT> def get(self,request): <NEW_LINE> <INDENT> register_form = RegisterForm() <NEW_LINE> return render(request,'register.html',{'register_form':register_form}) <NEW_LINE> <DEDENT> def post(self,request): <NEW_LINE> <INDENT> register_form = RegisterForm(request.POST) <NEW_LINE> if register_form.is_valid(): <NEW_LINE> <INDENT> user_name = request.POST.get('email', None) <NEW_LINE> pass_word = request.POST.get('password', None) <NEW_LINE> user_profile = UserProfile() <NEW_LINE> user_profile.username = user_name <NEW_LINE> user_profile.email = user_name <NEW_LINE> user_profile.is_active = False <NEW_LINE> user_profile.password = make_password(pass_word) <NEW_LINE> user_profile.save() <NEW_LINE> send_register_eamil(user_name,'register') <NEW_LINE> return render(request,'login.html') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return render(request,'register.html',{'register_form':register_form})
User registration.
62599032796e427e5384f859
class HTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def test_init(self): <NEW_LINE> <INDENT> p = H(4, "Lesson7 - HTML") <NEW_LINE> f = StringIO() <NEW_LINE> p.render(f) <NEW_LINE> expected = "<h4>Lesson7 - HTML</h4>\n" <NEW_LINE> self.assertEqual(expected, f.getvalue())
Tests that the heading number level is rendered correctly.
62599032d53ae8145f919541
class Description: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.title = "" <NEW_LINE> self.score = None <NEW_LINE> self.bits = None <NEW_LINE> self.e = None <NEW_LINE> self.num_alignments = None <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "%-66s %5s %s" % (self.title, self.score, self.e)
Stores information about one hit in the descriptions section. Members: title Title of the hit. score Number of bits. (int) bits Bit score. (float) e E value. (float) num_alignments Number of alignments for the same subject. (int)
625990328a349b6b4368731b
class Sounds(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pygame.mixer.init() <NEW_LINE> <DEDENT> def stop_all(self): <NEW_LINE> <INDENT> pygame.mixer.stop() <NEW_LINE> <DEDENT> def start_turn(self): <NEW_LINE> <INDENT> soundfile = 'sounds/263125_pan14_sine-fifths-up-beep.ogg' <NEW_LINE> pygame.mixer.Sound(soundfile).play() <NEW_LINE> <DEDENT> def end_turn(self): <NEW_LINE> <INDENT> soundfile = 'sounds/263124_pan14_sine-octaves-up-beep.ogg' <NEW_LINE> pygame.mixer.Sound(soundfile).play() <NEW_LINE> <DEDENT> def pause_turn(self): <NEW_LINE> <INDENT> soundfile = 'sounds/263655__pan14__upward-beep-chromatic-fifths.ogg' <NEW_LINE> pygame.mixer.Sound(soundfile).play() <NEW_LINE> <DEDENT> def end_of_round(self): <NEW_LINE> <INDENT> soundfile = 'sounds/407237__pointparkcinema__computer-chirp-2.ogg' <NEW_LINE> pygame.mixer.Sound(soundfile).play() <NEW_LINE> <DEDENT> def out_of_game(self): <NEW_LINE> <INDENT> soundfile = 'sounds/263123__pan14__sine-tri-tone-down-negative-beep-amb-verb.ogg' <NEW_LINE> pygame.mixer.Sound(soundfile).play() <NEW_LINE> <DEDENT> def warning(self): <NEW_LINE> <INDENT> soundfile = 'sounds/10_second_countdown.ogg' <NEW_LINE> pygame.mixer.Sound(soundfile).play()
contains all sounds of the game
625990321f5feb6acb163cce
class TestDependencySet(TestPackageClass): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(TestDependencySet, self).setUp() <NEW_LINE> self.package = "fabtools" <NEW_LINE> self.version = "0.19.0" <NEW_LINE> self.fab_reqs = json.load( open("tests/deputils_data/fabtools_0_19_0_req.json", 'r')) <NEW_LINE> self.ancestors, self.descendants = Package.get_direct_links_to_any_package( self.package, self.venv.edges) <NEW_LINE> <DEDENT> def test_sanity(self): <NEW_LINE> <INDENT> DepTools.check_changes_in_requirements_vs_env( self.fab_reqs, self.descendants) <NEW_LINE> <DEDENT> def test_returns_dict(self): <NEW_LINE> <INDENT> res = DepTools.check_changes_in_requirements_vs_env( self.fab_reqs, self.descendants) <NEW_LINE> self.assertEqual(type(res), dict) <NEW_LINE> <DEDENT> def test_empty_lists_in_removed_and_new_for_fabtools(self): <NEW_LINE> <INDENT> res = DepTools.check_changes_in_requirements_vs_env( self.fab_reqs, self.descendants) <NEW_LINE> self.assertEqual(res['removed_deps'], []) <NEW_LINE> self.assertEqual(res['new_deps'], [])
Dependency set tests; methods: check_changes_in_requirements_vs_env. A good package for this test is fabtools 0.19.0.
6259903250485f2cf55dc05b
class Http409(JSONErrorResponse): <NEW_LINE> <INDENT> status_code = 409
HTTP 409 Conflict
6259903215baa72349463078
class MkdirFileLock(LockBase): <NEW_LINE> <INDENT> def __init__(self, path, threaded=True): <NEW_LINE> <INDENT> LockBase.__init__(self, path, threaded) <NEW_LINE> if threaded: <NEW_LINE> <INDENT> tname = "{0:x}-".format(_thread.get_ident()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> tname = "" <NEW_LINE> <DEDENT> self.unique_name = os.path.join( self.lock_file, "{0}.{1}{2}".format( self.hostname, tname, self.pid ) ) <NEW_LINE> <DEDENT> def acquire(self, timeout=None): <NEW_LINE> <INDENT> end_time = time.time() <NEW_LINE> if timeout is not None and timeout > 0: <NEW_LINE> <INDENT> end_time += timeout <NEW_LINE> <DEDENT> if timeout is None: <NEW_LINE> <INDENT> wait = 0.1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> wait = max(0, timeout / 10) <NEW_LINE> <DEDENT> while True: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> os.mkdir(self.lock_file) <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> err = sys.exc_info()[1] <NEW_LINE> if err.errno == errno.EEXIST: <NEW_LINE> <INDENT> if os.path.exists(self.unique_name): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if timeout is not None and time.time() > end_time: <NEW_LINE> <INDENT> if timeout > 0: <NEW_LINE> <INDENT> raise LockTimeout <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise AlreadyLocked <NEW_LINE> <DEDENT> <DEDENT> time.sleep(wait) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise LockFailed("failed to create {0}".format(self.lock_file)) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> open(self.unique_name, "wb").close() <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def release(self): <NEW_LINE> <INDENT> if not self.is_locked(): <NEW_LINE> <INDENT> raise NotLocked <NEW_LINE> <DEDENT> elif not os.path.exists(self.unique_name): <NEW_LINE> <INDENT> raise NotMyLock <NEW_LINE> <DEDENT> os.unlink(self.unique_name) <NEW_LINE> os.rmdir(self.lock_file) <NEW_LINE> <DEDENT> def is_locked(self): <NEW_LINE> <INDENT> return os.path.exists(self.lock_file) <NEW_LINE> <DEDENT> def i_am_locking(self): <NEW_LINE> <INDENT> return (self.is_locked() and os.path.exists(self.unique_name)) <NEW_LINE> <DEDENT> def break_lock(self): <NEW_LINE> <INDENT> if os.path.exists(self.lock_file): <NEW_LINE> <INDENT> for name in os.listdir(self.lock_file): <NEW_LINE> <INDENT> os.unlink(os.path.join(self.lock_file, name)) <NEW_LINE> <DEDENT> os.rmdir(self.lock_file)
Lock file by creating a directory.
62599032ac7a0e7691f735c6
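The lock above relies on os.mkdir being atomic, so only one caller can create the lock directory. A minimal standalone sketch of that idea (my own illustration; the function names and the /tmp path are hypothetical, and none of the LockBase machinery is included):

import errno
import os
import time

def acquire(lock_dir, timeout=5.0, wait=0.1):
    # os.mkdir is atomic: exactly one caller succeeds in creating the directory.
    deadline = time.time() + timeout
    while True:
        try:
            os.mkdir(lock_dir)
            return True
        except OSError as err:
            if err.errno != errno.EEXIST:
                raise
            if time.time() > deadline:
                return False
            time.sleep(wait)

def release(lock_dir):
    # Removing the directory frees the lock for the next caller.
    os.rmdir(lock_dir)

if acquire("/tmp/example.lock"):
    try:
        pass  # critical section
    finally:
        release("/tmp/example.lock")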
class ModWallOpe(Operator): <NEW_LINE> <INDENT> bl_label = 'Wall' <NEW_LINE> bl_idname = 'my.modwall' <NEW_LINE> def execute(self, context): <NEW_LINE> <INDENT> obj = context.object <NEW_LINE> bpy.ops.object.modifier_add(type='ARRAY') <NEW_LINE> bpy.context.object.modifiers["Array"].count = 10 <NEW_LINE> bpy.context.object.modifiers["Array"].relative_offset_displace[0] = 1 <NEW_LINE> bpy.context.object.modifiers["Array"].show_expanded = False <NEW_LINE> bpy.ops.object.modifier_add(type='ARRAY') <NEW_LINE> bpy.context.object.modifiers["Array.001"].count = 4 <NEW_LINE> bpy.context.object.modifiers["Array.001"].relative_offset_displace[0] = 0 <NEW_LINE> bpy.context.object.modifiers["Array.001"].relative_offset_displace[2] = 1 <NEW_LINE> bpy.context.object.modifiers["Array.001"].show_expanded = False <NEW_LINE> self.report({'INFO'}, 'Modular Finished!') <NEW_LINE> return {'FINISHED'}
Visualizes the selected object as a modular wall.
625990328e05c05ec3f6f6ca
class RepulsionForce: <NEW_LINE> <INDENT> def __init__(self, coefficient): <NEW_LINE> <INDENT> self.coefficient = coefficient <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return str(self.__class__) <NEW_LINE> <DEDENT> def apply_node_to_node(self, node1, node2): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def apply_approximation(self, node, region): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def apply_gravitation(self, node, gravity): <NEW_LINE> <INDENT> raise NotImplementedError
Here are all the formulas for attraction and repulsion.
6259903276d4e153a661dae0
class HammerFirst(CloudAPI): <NEW_LINE> <INDENT> def list_servers(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> endpoint = self._get_endpoint('compute', type_name='cloudServers') <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> log.debug(( 'Exception: HammerFirst.list_servers no firstgen endpoint' )) <NEW_LINE> return [] <NEW_LINE> <DEDENT> if endpoint is None: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> headers = { 'X-Auth-Token': self.auth.token, 'Accept': 'application/json', 'Content-Type': 'application/json' } <NEW_LINE> if endpoint: <NEW_LINE> <INDENT> return self.sess.get( '{0}/servers/detail'.format(endpoint), headers=headers, verify=False ).json()['servers'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> <DEDENT> def get_server(self, server): <NEW_LINE> <INDENT> endpoint = self._get_endpoint('compute', type_name='cloudServers') <NEW_LINE> headers = { 'X-Auth-Token': self.auth.token, 'Accept': 'application/json', 'Content-Type': 'application/json' } <NEW_LINE> return self.sess.get( '{0}/servers/{1}'.format(endpoint, server), headers=headers, verify=False ).json()['server']
Firstgen API library for hammercloud.
625990328c3a8732951f7637
class UnsupportedS3ControlConfigurationError(BotoCoreError): <NEW_LINE> <INDENT> fmt = ( 'Unsupported configuration when using S3 Control: {msg}' )
Error when an unsupported configuration is used with S3 Control
62599032d10714528d69eefb
class MsgDetailedInfo: <NEW_LINE> <INDENT> QUALNAME = "pyrogram.raw.base.MsgDetailedInfo" <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> raise TypeError("Base types can only be used for type checking purposes: " "you tried to use a base type instance as argument, " "but you need to instantiate one of its constructors instead. " "More info: https://docs.pyrogram.org/telegram/base/msg-detailed-info")
This base type has 2 constructors available. Constructors: .. hlist:: :columns: 2 - :obj:`MsgDetailedInfo <pyrogram.raw.types.MsgDetailedInfo>` - :obj:`MsgNewDetailedInfo <pyrogram.raw.types.MsgNewDetailedInfo>`
625990328a349b6b4368731d
class ContactPredictionHead(nn.Module): <NEW_LINE> <INDENT> def __init__(self, in_features, bias=True): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.regression = nn.Linear(in_features, 1, bias) <NEW_LINE> self.activation = nn.Sigmoid() <NEW_LINE> <DEDENT> def forward(self, features): <NEW_LINE> <INDENT> features = features.to(next(self.parameters())) <NEW_LINE> features = apc(symmetrize(features)) <NEW_LINE> features = features.permute(0, 2, 3, 1) <NEW_LINE> return self.activation(self.regression(features).squeeze(3))
Performs symmetrization, apc, and computes a logistic regression on the output features
62599032d164cc6175822052
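symmetrize and apc are not defined in this entry; as a hedged sketch, these are the definitions typically used for contact prediction heads (symmetrization over the two sequence dimensions plus the average product correction), stated as assumptions rather than quoted from the source:

import torch

def symmetrize(x):
    # Make the map symmetric over the last two (sequence) dimensions.
    return x + x.transpose(-1, -2)

def apc(x):
    # Average product correction: subtract the outer product of the
    # row/column sums, normalized by the total sum, from each entry.
    a1 = x.sum(-1, keepdim=True)
    a2 = x.sum(-2, keepdim=True)
    a12 = x.sum(dim=(-1, -2), keepdim=True)
    return x - (a1 * a2) / a12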
class BaseURLs(object): <NEW_LINE> <INDENT> def __init__(self, values, links): <NEW_LINE> <INDENT> self.values = values <NEW_LINE> self.links = links <NEW_LINE> <DEDENT> def to_xml(self): <NEW_LINE> <INDENT> dom = etree.Element("baseURLs") <NEW_LINE> dom.set(u"xmlns", "http://docs.openstack.org/identity/api/v2.0") <NEW_LINE> for t in self.values: <NEW_LINE> <INDENT> dom.append(t.to_dom()) <NEW_LINE> <DEDENT> for t in self.links: <NEW_LINE> <INDENT> dom.append(t.to_dom()) <NEW_LINE> <DEDENT> return etree.tostring(dom) <NEW_LINE> <DEDENT> def to_json(self): <NEW_LINE> <INDENT> values = [t.to_dict()["baseURL"] for t in self.values] <NEW_LINE> links = [t.to_dict()["links"] for t in self.links] <NEW_LINE> return json.dumps({"baseURLs": {"values": values, "links": links}})
A collection of baseURLs.
625990328e05c05ec3f6f6cb
class Email(XMLSerializable): <NEW_LINE> <INDENT> def __init__( self, to, subject, body, cc=None, bcc=None, content_type=None, attachments=None ): <NEW_LINE> <INDENT> super(Email, self).__init__(xml_tag='email') <NEW_LINE> self.to = to <NEW_LINE> self.subject = subject <NEW_LINE> self.body = body <NEW_LINE> self.cc = cc <NEW_LINE> self.bcc = bcc <NEW_LINE> self.content_type = content_type <NEW_LINE> self.attachments = attachments <NEW_LINE> <DEDENT> def _xml(self, doc, tag, text): <NEW_LINE> <INDENT> def format_list(strings): <NEW_LINE> <INDENT> if hasattr(strings, '__iter__') and not isinstance(strings, six.string_types): <NEW_LINE> <INDENT> return ','.join(sorted(strings)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return strings <NEW_LINE> <DEDENT> <DEDENT> with tag(self.xml_tag, xmlns='uri:oozie:email-action:0.2'): <NEW_LINE> <INDENT> with tag('to'): <NEW_LINE> <INDENT> doc.text(format_list(self.to)) <NEW_LINE> <DEDENT> with tag('subject'): <NEW_LINE> <INDENT> doc.text(self.subject) <NEW_LINE> <DEDENT> with tag('body'): <NEW_LINE> <INDENT> doc.text(self.body) <NEW_LINE> <DEDENT> if self.cc: <NEW_LINE> <INDENT> with tag('cc'): <NEW_LINE> <INDENT> doc.text(format_list(self.cc)) <NEW_LINE> <DEDENT> <DEDENT> if self.bcc: <NEW_LINE> <INDENT> with tag('bcc'): <NEW_LINE> <INDENT> doc.text(format_list(self.bcc)) <NEW_LINE> <DEDENT> <DEDENT> if self.content_type: <NEW_LINE> <INDENT> with tag('content_type'): <NEW_LINE> <INDENT> doc.text(self.content_type) <NEW_LINE> <DEDENT> <DEDENT> if self.attachments: <NEW_LINE> <INDENT> with tag('attachment'): <NEW_LINE> <INDENT> doc.text(format_list(self.attachments)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return doc
Email action for use within a workflow.
62599032be383301e02548f4
class IEC104_IO_M_EI_NA_1(IEC104_IO_Packet): <NEW_LINE> <INDENT> name = 'M_EI_NA_1' <NEW_LINE> _DEFINED_IN = [IEC104_IO_Packet.DEFINED_IN_IEC_101, IEC104_IO_Packet.DEFINED_IN_IEC_104] <NEW_LINE> _IEC104_IO_TYPE_ID = IEC104_IO_ID_M_EI_NA_1 <NEW_LINE> fields_desc = IEC104_IE_COI.informantion_element_fields
end of initialization EN 60870-5-101:2003, sec. 7.3.3.1 (p. 106)
62599032ec188e330fdf9975
class PriceModificationFixedPrice(object): <NEW_LINE> <INDENT> openapi_types = { 'type': 'str', 'price': 'PriceModificationFixedPriceHolder' } <NEW_LINE> attribute_map = { 'type': 'type', 'price': 'price' } <NEW_LINE> def __init__(self, type='FIXED_PRICE', price=None, local_vars_configuration=None): <NEW_LINE> <INDENT> if local_vars_configuration is None: <NEW_LINE> <INDENT> local_vars_configuration = Configuration() <NEW_LINE> <DEDENT> self.local_vars_configuration = local_vars_configuration <NEW_LINE> self._type = None <NEW_LINE> self._price = None <NEW_LINE> self.discriminator = None <NEW_LINE> if type is not None: <NEW_LINE> <INDENT> self.type = type <NEW_LINE> <DEDENT> if price is not None: <NEW_LINE> <INDENT> self.price = price <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def type(self): <NEW_LINE> <INDENT> return self._type <NEW_LINE> <DEDENT> @type.setter <NEW_LINE> def type(self, type): <NEW_LINE> <INDENT> self._type = type <NEW_LINE> <DEDENT> @property <NEW_LINE> def price(self): <NEW_LINE> <INDENT> return self._price <NEW_LINE> <DEDENT> @price.setter <NEW_LINE> def price(self, price): <NEW_LINE> <INDENT> self._price = price <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.openapi_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, PriceModificationFixedPrice): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.to_dict() == other.to_dict() <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, PriceModificationFixedPrice): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return self.to_dict() != other.to_dict()
NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually.
625990324e696a045264e692
class Linear_Sparse(torch.nn.Module): <NEW_LINE> <INDENT> def __init__(self,input_size, output_size, bias=True, cuda_device="cuda:0"): <NEW_LINE> <INDENT> super(self.__class__, self).__init__() <NEW_LINE> self.input_size=input_size <NEW_LINE> self.output_size=output_size <NEW_LINE> self.weight = torch.nn.Parameter(torch.rand(input_size)/input_size, requires_grad=True) <NEW_LINE> if bias: <NEW_LINE> <INDENT> self.bias = torch.nn.Parameter(torch.zeros(output_size), requires_grad=True) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.bias = None <NEW_LINE> <DEDENT> self.hard_mask = torch.ones(input_size) <NEW_LINE> self.gpuavail = torch.cuda.is_available() <NEW_LINE> if self.gpuavail: <NEW_LINE> <INDENT> self.cuda_device=cuda_device <NEW_LINE> self.hard_mask = self.hard_mask.to(cuda_device) <NEW_LINE> <DEDENT> <DEDENT> def set_hard_mask(self,hard_mask): <NEW_LINE> <INDENT> self.hard_mask = hard_mask <NEW_LINE> if self.gpuavail: <NEW_LINE> <INDENT> self.hard_mask = self.hard_mask.to(self.cuda_device) <NEW_LINE> <DEDENT> <DEDENT> def forward(self, input): <NEW_LINE> <INDENT> matm=self.weight*input*self.hard_mask <NEW_LINE> if self.bias is not None: <NEW_LINE> <INDENT> matm = matm + self.bias <NEW_LINE> <DEDENT> return matm
A linear module with a hard mask on its weights.
62599032c432627299fa40d8
class adict(util.ReprMixin, util.PrivAttrKeyMixin, AttrDict): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> def convert(o): <NEW_LINE> <INDENT> if isinstance(o, dict): <NEW_LINE> <INDENT> return adict((k, convert(v)) for k, v in o.items()) <NEW_LINE> <DEDENT> elif isinstance(o, (list, tuple)): <NEW_LINE> <INDENT> return type(o)(convert(v) for v in o) <NEW_LINE> <DEDENT> return o <NEW_LINE> <DEDENT> if args: <NEW_LINE> <INDENT> obj, *args = args <NEW_LINE> <DEDENT> elif kwargs: <NEW_LINE> <INDENT> obj = kwargs <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> obj = {} <NEW_LINE> <DEDENT> if not isinstance(obj, types.GeneratorType): <NEW_LINE> <INDENT> obj = copy.deepcopy(obj) <NEW_LINE> <DEDENT> AttrDict.__init__(self, convert(obj), *args) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _valid_name(cls, key): <NEW_LINE> <INDENT> return ( isinstance(key, six.string_types) and re.match('^[A-Za-z_][A-Za-z0-9_]*$', key) ) <NEW_LINE> <DEDENT> def __getattr__(self, key): <NEW_LINE> <INDENT> if key not in self or not self._valid_name(key): <NEW_LINE> <INDENT> raise AttributeError( "'{cls}' instance has no attribute '{name}'".format( cls=self.__class__.__name__, name=key ) ) <NEW_LINE> <DEDENT> return self[key]
Customized attribute dictionary type.
6259903263f4b57ef00865e4
class TankRunner(object): <NEW_LINE> <INDENT> def __init__( self, cfg, manager_queue, session_id, tank_config, first_break): <NEW_LINE> <INDENT> work_dir = os.path.join(cfg['tests_dir'], session_id) <NEW_LINE> lock_dir = cfg['lock_dir'] <NEW_LINE> load_ini_path = os.path.join(work_dir, 'load.yaml') <NEW_LINE> _log.info('Saving tank config to %s', load_ini_path) <NEW_LINE> with open(load_ini_path, 'w') as tank_config_file: <NEW_LINE> <INDENT> tank_config_file.write(six.ensure_str(tank_config)) <NEW_LINE> <DEDENT> self.tank_queue = multiprocessing.Queue() <NEW_LINE> self.set_break(first_break) <NEW_LINE> ignore_machine_defaults = cfg['ignore_machine_defaults'] <NEW_LINE> configs_location = cfg['configs_location'] <NEW_LINE> self.tank_process = multiprocessing.Process( target=yandex_tank_api.worker.run, args=( self.tank_queue, manager_queue, work_dir, lock_dir, session_id, ignore_machine_defaults, configs_location)) <NEW_LINE> self.tank_process.start() <NEW_LINE> <DEDENT> def set_break(self, next_break): <NEW_LINE> <INDENT> self.tank_queue.put({'break': next_break}) <NEW_LINE> <DEDENT> def is_alive(self): <NEW_LINE> <INDENT> return self.tank_process.exitcode is None <NEW_LINE> <DEDENT> def get_exitcode(self): <NEW_LINE> <INDENT> return self.tank_process.exitcode <NEW_LINE> <DEDENT> def join(self): <NEW_LINE> <INDENT> _log.info('Waiting for tank exit...') <NEW_LINE> return self.tank_process.join() <NEW_LINE> <DEDENT> def stop(self, remove_break): <NEW_LINE> <INDENT> if self.is_alive(): <NEW_LINE> <INDENT> sig = signal.SIGTERM if remove_break else signal.SIGINT <NEW_LINE> os.kill(self.tank_process.pid, sig) <NEW_LINE> <DEDENT> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> self.stop(remove_break=True)
Manages the tank process and its working directory.
625990326e29344779b01733
class LoggedScalarScalarSample(LoggedScalarScalar): <NEW_LINE> <INDENT> def get(self, reset: bool = True) -> Optional[float]: <NEW_LINE> <INDENT> result = self.reduce() <NEW_LINE> reset = True <NEW_LINE> if reset: <NEW_LINE> <INDENT> self.reset() <NEW_LINE> <DEDENT> return result
Keeps the latest sample of a scalar.
62599032d53ae8145f919547
class SettingsCredSnmpV3FailGridRemote(RemoteModel): <NEW_LINE> <INDENT> properties = ("id", "Collector", "DeviceID", "DeviceName", "DeviceType", "DeviceIPDotted", "DeviceIPNumeric", "VirtualNetworkID", "Network", "DeviceAssurance", )
| ``id:`` none | ``attribute type:`` string | ``Collector:`` none | ``attribute type:`` string | ``DeviceID:`` none | ``attribute type:`` string | ``DeviceName:`` none | ``attribute type:`` string | ``DeviceType:`` none | ``attribute type:`` string | ``DeviceIPDotted:`` none | ``attribute type:`` string | ``DeviceIPNumeric:`` none | ``attribute type:`` string | ``VirtualNetworkID:`` none | ``attribute type:`` string | ``Network:`` none | ``attribute type:`` string | ``DeviceAssurance:`` none | ``attribute type:`` string
625990336fece00bbaccca93
class VscodeCli(object): <NEW_LINE> <INDENT> def install(self): <NEW_LINE> <INDENT> var_full_path = os.path.dirname(os.path.realpath(__file__)) <NEW_LINE> var_install = input("安装vscode吗? [y/n]: ") <NEW_LINE> if var_install.lower() == "y": <NEW_LINE> <INDENT> var_host_target = input("请输入目标主机ip地址(例如: 192.168.1.20:8080): ") <NEW_LINE> var_host_target_user = input("请输入目标主机登录用户(默认: root): ") or "root" <NEW_LINE> <DEDENT> if var_install.lower() == "y": <NEW_LINE> <INDENT> var_user = input( "输入安装用户名称,程序将会安装在用户目录下的software子目录中" "(例如:用户名称“dexterleslie”,程序安装在目录/home/dexterleslie/software中):") <NEW_LINE> var_command = "ANSIBLE_HOST_KEY_CHECKING=False ansible-playbook " + var_full_path + "/role_vscode_install.yml" <NEW_LINE> var_command = cli_common.concat_command(var_command, var_host_target, var_host_target_user) <NEW_LINE> if var_user: <NEW_LINE> <INDENT> var_command = var_command + " -e varUser=" + var_user <NEW_LINE> <DEDENT> cli_common.execute_command(var_command)
Manages the vscode CLI program. OS support: Ubuntu.
6259903350485f2cf55dc061
class Square: <NEW_LINE> <INDENT> __size = None <NEW_LINE> def __init__(self, size=0): <NEW_LINE> <INDENT> if size != int(size): <NEW_LINE> <INDENT> raise TypeError('size must be an integer') <NEW_LINE> <DEDENT> if size < 0: <NEW_LINE> <INDENT> raise ValueError('size must be >= 0') <NEW_LINE> <DEDENT> self.__size = size
A class named Square.
62599033d10714528d69eefd
class Bullet(Sprite): <NEW_LINE> <INDENT> def __init__(self, ai_settings, screen, ship): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.screen = screen <NEW_LINE> self.rect = pygame.Rect(0, 0, ai_settings.bullet_width, ai_settings.bullet_height) <NEW_LINE> self.rect.centerx = ship.rect.centerx <NEW_LINE> self.rect.top = ship.rect.top <NEW_LINE> self.y = float(self.rect.y) <NEW_LINE> self.color = ai_settings.bullet_color <NEW_LINE> self.speed_factor = ai_settings.bullet_speed_factor <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> self.y -= self.speed_factor <NEW_LINE> self.rect.y = self.y <NEW_LINE> <DEDENT> def draw_bullet(self): <NEW_LINE> <INDENT> "" <NEW_LINE> pygame.draw.rect(self.screen, self.color, self.rect)
A class to manage bullets fired from the ship
6259903326068e7796d4da2f
class BatchBuildOperation(BatchOperation): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(BatchBuildOperation, self).__init__() <NEW_LINE> self.valid_keys = models.BUILD_VALID_KEYS
A batch operation for the `build` collection.
625990339b70327d1c57fe69
class Solution: <NEW_LINE> <INDENT> def canPermutePalindrome(self, s): <NEW_LINE> <INDENT> d = {ll:0 for ll in s} <NEW_LINE> for ll in s: <NEW_LINE> <INDENT> if d[ll] == 0: <NEW_LINE> <INDENT> d[ll] = 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> d[ll] = 0 <NEW_LINE> <DEDENT> <DEDENT> num = 0 <NEW_LINE> for ll in d: <NEW_LINE> <INDENT> num += d[ll] <NEW_LINE> <DEDENT> if num > 1: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return True
@param s: the given string @return: if a permutation of the string could form a palindrome
62599033a8ecb03325872302
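For context, the check above works because a string can be permuted into a palindrome exactly when at most one character occurs an odd number of times. An equivalent, more idiomatic sketch (my own, not part of the dataset entry):

from collections import Counter

def can_permute_palindrome(s: str) -> bool:
    # At most one character may have an odd count.
    return sum(count % 2 for count in Counter(s).values()) <= 1

print(can_permute_palindrome("carerac"))  # True
print(can_permute_palindrome("code"))     # False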
class DummyContainer(TestcaseContainer): <NEW_LINE> <INDENT> def __init__(self, logger=None): <NEW_LINE> <INDENT> super(DummyContainer, self).__init__(logger) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def find_testcases(modulename, moduleroot, tc_meta_schema, path=None, suiteconfig=None, logger=None): <NEW_LINE> <INDENT> dummycontainer = DummyContainer(logger) <NEW_LINE> dummycontainer.tcname = modulename <NEW_LINE> dummycontainer._modulename = modulename <NEW_LINE> dummycontainer.status = TestStatus.PENDING <NEW_LINE> dummycontainer._instance = None <NEW_LINE> dummycontainer._final_configuration = {} <NEW_LINE> dummycontainer._moduleroot = moduleroot <NEW_LINE> dummycontainer._meta_schema = tc_meta_schema <NEW_LINE> dummycontainer._result = None <NEW_LINE> dummycontainer._filepath = path <NEW_LINE> dummycontainer._suiteconfig = suiteconfig if suiteconfig else {} <NEW_LINE> return dummycontainer <NEW_LINE> <DEDENT> def run(self, forceflash=False): <NEW_LINE> <INDENT> return self._result <NEW_LINE> <DEDENT> def get(self, field): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def set_final_config(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def set_result_verdict(self, reason): <NEW_LINE> <INDENT> if not self._result: <NEW_LINE> <INDENT> self._result = Result() <NEW_LINE> <DEDENT> self._result.set_verdict(verdict="inconclusive", retcode=ReturnCodes.RETCODE_FAIL_INCONCLUSIVE, duration=0) <NEW_LINE> self._result.tc_metadata["name"] = self.tcname <NEW_LINE> self._result.fail_reason = reason <NEW_LINE> <DEDENT> def validate_tc_instance(self): <NEW_LINE> <INDENT> return True
Class DummyContainer subclasses TestcaseContainer, acts as a dummy object for listing test cases that were not found when importing test cases.
6259903307d97122c4217d8c
class Indicator(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.strategy_id = '' <NEW_LINE> self.account_id = '' <NEW_LINE> self.nav = 0.0 <NEW_LINE> self.pnl = 0.0 <NEW_LINE> self.profit_ratio = 0.0 <NEW_LINE> self.profit_ratio_bench = 0.0 <NEW_LINE> self.sharp_ratio = 0.0 <NEW_LINE> self.risk_ratio = 0.0 <NEW_LINE> self.trade_count = 0 <NEW_LINE> self.win_count = 0 <NEW_LINE> self.lose_count = 0 <NEW_LINE> self.win_ratio = 0.0 <NEW_LINE> self.max_profit = 0.0 <NEW_LINE> self.min_profit = 0.0 <NEW_LINE> self.max_single_trade_profit = 0.0 <NEW_LINE> self.min_single_trade_profit = 0.0 <NEW_LINE> self.daily_max_single_trade_profit = 0.0 <NEW_LINE> self.daily_min_single_trade_profit = 0.0 <NEW_LINE> self.max_position_value = 0.0 <NEW_LINE> self.min_position_value = 0.0 <NEW_LINE> self.max_drawdown = 0.0 <NEW_LINE> self.daily_pnl = 0.0 <NEW_LINE> self.daily_return = 0.0 <NEW_LINE> self.annual_return = 0.0 <NEW_LINE> self.cum_inout = 0.0 <NEW_LINE> self.cum_trade = 0.0 <NEW_LINE> self.cum_pnl = 0.0 <NEW_LINE> self.cum_commission = 0.0 <NEW_LINE> self.transact_time = 0.0
Account performance indicators.
62599033d53ae8145f919549
class DatasetTestBase(test.TestCase): <NEW_LINE> <INDENT> def _assert_datasets_equal(self, dataset1, dataset2): <NEW_LINE> <INDENT> next1 = dataset1.make_one_shot_iterator().get_next() <NEW_LINE> next2 = dataset2.make_one_shot_iterator().get_next() <NEW_LINE> with self.cached_session() as sess: <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> op1 = sess.run(next1) <NEW_LINE> <DEDENT> except errors.OutOfRangeError: <NEW_LINE> <INDENT> with self.assertRaises(errors.OutOfRangeError): <NEW_LINE> <INDENT> sess.run(next2) <NEW_LINE> <DEDENT> break <NEW_LINE> <DEDENT> op2 = sess.run(next2) <NEW_LINE> op1 = nest.flatten(op1) <NEW_LINE> op2 = nest.flatten(op2) <NEW_LINE> assert len(op1) == len(op2) <NEW_LINE> for i in range(len(op1)): <NEW_LINE> <INDENT> self.assertAllEqual(op1[i], op2[i]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def _assert_datasets_raise_same_error(self, dataset1, dataset2, exception_class, replacements=None): <NEW_LINE> <INDENT> next1, next2 = dataset1.make_one_shot_iterator().get_next(), dataset2.make_one_shot_iterator().get_next() <NEW_LINE> with self.cached_session() as sess: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> sess.run(next1) <NEW_LINE> raise ValueError( "Expected dataset to raise an error of type %s, but it did not." % repr(exception_class)) <NEW_LINE> <DEDENT> except exception_class as e: <NEW_LINE> <INDENT> expected_message = e.message <NEW_LINE> for old, new, count in replacements: <NEW_LINE> <INDENT> expected_message = expected_message.replace(old, new, count) <NEW_LINE> <DEDENT> with self.assertRaisesRegexp(exception_class, re.escape(expected_message)): <NEW_LINE> <INDENT> sess.run(next2)
Base class for dataset tests.
62599033d6c5a102081e320b
class LeoAssaultsSummary(object): <NEW_LINE> <INDENT> TABLENAME = 'leo_assaults_summary' <NEW_LINE> COLUMNS = [ ColumnDefinition('year', 'INT'), ColumnDefinition('county', 'TEXT'), ColumnDefinition('injury_status', 'TEXT'), ColumnDefinition('assaults', 'INT'), ColumnDefinition('population', 'INT'), ColumnDefinition('assaults_per_per_100000', 'FLOAT'), ]
Table containing CA LEO assaults summary data. It would be better to create this from an event-level dataset.
6259903396565a6dacd2d800
class UpdateOwnProfile(permissions.BasePermission): <NEW_LINE> <INDENT> def has_object_permission(self, request, view, obj): <NEW_LINE> <INDENT> if request.method in permissions.SAFE_METHODS: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return obj.id == request.user.id
Allow users to edit only their own profile.
6259903366673b3332c314d8
class UserAlreadyExists(QSDockerException): <NEW_LINE> <INDENT> def __init__(self, message="User already exists in the system", error="User exists", status_code=409): <NEW_LINE> <INDENT> super(UserAlreadyExists,self).__init__(message=message, error=error, status_code=status_code)
An exception class to be thrown when trying to register a user which already exists in the system.
62599033d4950a0f3b1116b1
class StdOutListener(StreamListener): <NEW_LINE> <INDENT> def __init__(self, fetched_tweets_filename): <NEW_LINE> <INDENT> self.fetched_tweets_filename = fetched_tweets_filename <NEW_LINE> <DEDENT> def on_data(self, data, data_print = False): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if data_print == True: <NEW_LINE> <INDENT> print(data) <NEW_LINE> <DEDENT> with open(self.fetched_tweets_filename, 'a') as tf: <NEW_LINE> <INDENT> tf.write(data) <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> except BaseException as e: <NEW_LINE> <INDENT> print("Error on_data %s" % str(e)) <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> def on_error(self, status): <NEW_LINE> <INDENT> print(status)
This is a basic listener that just prints received tweets to stdout.
62599033be383301e02548fa
class TestOnData(object): <NEW_LINE> <INDENT> @pytest.fixture(autouse=True) <NEW_LINE> def set_common_fixtures(self): <NEW_LINE> <INDENT> filepath = os.path.dirname(os.path.realpath(__file__)) <NEW_LINE> testfilepath = os.path.join(filepath, 'test_files') <NEW_LINE> corpus = USPublications(testfilepath) <NEW_LINE> self.patent_doc = next(corpus.iter_xml()).to_patentdoc() <NEW_LINE> <DEDENT> def test_features(self): <NEW_LINE> <INDENT> pd = self.patent_doc <NEW_LINE> assert "39 claims" in pd.__repr__() <NEW_LINE> assert pd.description.get_paragraph(26).sentence_count == 6 <NEW_LINE> assert pd.description.sentence_count == 194 <NEW_LINE> assert ( "siderail" in pd.description.get_paragraph(26).sentences[2].words ) <NEW_LINE> <DEDENT> def test_counters(self): <NEW_LINE> <INDENT> pd = self.patent_doc <NEW_LINE> assert pd.vocabulary == 737 <NEW_LINE> assert pd.unique_characters == 65
Testing functions on Patent Example.
62599033796e427e5384f863
class OpenXMLParserFormatterTest(test_lib.EventFormatterTestCase): <NEW_LINE> <INDENT> def testInitialization(self): <NEW_LINE> <INDENT> event_formatter = oxml.OpenXMLParserFormatter() <NEW_LINE> self.assertNotEqual(event_formatter, None) <NEW_LINE> <DEDENT> def testGetFormatStringAttributeNames(self): <NEW_LINE> <INDENT> event_formatter = oxml.OpenXMLParserFormatter() <NEW_LINE> expected_attribute_names = [ u'creating_app', u'app_version', u'title', u'subject', u'last_saved_by', u'author', u'total_edit_time', u'keywords', u'comments', u'revision_number', u'template', u'number_of_pages', u'number_of_words', u'number_of_characters', u'number_of_characters_with_spaces', u'number_of_lines', u'company', u'manager', u'shared', u'security', u'hyperlinks_changed', u'links_up_to_date', u'scale_crop', u'dig_sig', u'slides', u'hidden_slides', u'presentation_format', u'mm_clips', u'notes'] <NEW_LINE> self._TestGetFormatStringAttributeNames( event_formatter, expected_attribute_names)
Tests for the OXML event formatter.
62599033d164cc6175822059
class FritzDectSwitch(SwitchDevice): <NEW_LINE> <INDENT> def __init__(self, hass, data, name): <NEW_LINE> <INDENT> self.units = hass.config.units <NEW_LINE> self.data = data <NEW_LINE> self._name = name <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @property <NEW_LINE> def device_state_attributes(self): <NEW_LINE> <INDENT> attrs = {} <NEW_LINE> if self.data.has_powermeter and self.data.current_consumption is not None and self.data.total_consumption is not None: <NEW_LINE> <INDENT> attrs[ATTR_CURRENT_CONSUMPTION] = "{:.1f}".format( self.data.current_consumption) <NEW_LINE> attrs[ATTR_CURRENT_CONSUMPTION_UNIT] = "{}".format( ATTR_CURRENT_CONSUMPTION_UNIT_VALUE) <NEW_LINE> attrs[ATTR_TOTAL_CONSUMPTION] = "{:.3f}".format( self.data.total_consumption) <NEW_LINE> attrs[ATTR_TOTAL_CONSUMPTION_UNIT] = "{}".format( ATTR_TOTAL_CONSUMPTION_UNIT_VALUE) <NEW_LINE> <DEDENT> if self.data.has_temperature and self.data.temperature is not None: <NEW_LINE> <INDENT> attrs[ATTR_TEMPERATURE] = "{}".format( self.units.temperature(self.data.temperature, TEMP_CELSIUS)) <NEW_LINE> attrs[ATTR_TEMPERATURE_UNIT] = "{}".format( self.units.temperature_unit) <NEW_LINE> <DEDENT> return attrs <NEW_LINE> <DEDENT> @property <NEW_LINE> def current_power_watt(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return float(self.data.current_consumption) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def is_on(self): <NEW_LINE> <INDENT> return self.data.state <NEW_LINE> <DEDENT> def turn_on(self, **kwargs): <NEW_LINE> <INDENT> if not self.data.is_online: <NEW_LINE> <INDENT> _LOGGER.error("turn_on: Not online skipping request") <NEW_LINE> return <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> actor = self.data.fritz.get_actor_by_ain(self.data.ain) <NEW_LINE> actor.switch_on() <NEW_LINE> <DEDENT> except (RequestException, HTTPError): <NEW_LINE> <INDENT> _LOGGER.error("Fritz!Box query failed, triggering relogin") <NEW_LINE> self.data.is_online = False <NEW_LINE> <DEDENT> <DEDENT> def turn_off(self): <NEW_LINE> <INDENT> if not self.data.is_online: <NEW_LINE> <INDENT> _LOGGER.error("turn_off: Not online skipping request") <NEW_LINE> return <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> actor = self.data.fritz.get_actor_by_ain(self.data.ain) <NEW_LINE> actor.switch_off() <NEW_LINE> <DEDENT> except (RequestException, HTTPError): <NEW_LINE> <INDENT> _LOGGER.error("Fritz!Box query failed, triggering relogin") <NEW_LINE> self.data.is_online = False <NEW_LINE> <DEDENT> <DEDENT> def update(self): <NEW_LINE> <INDENT> if not self.data.is_online: <NEW_LINE> <INDENT> _LOGGER.error("update: Not online, logging back in") <NEW_LINE> try: <NEW_LINE> <INDENT> self.data.fritz.login() <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> _LOGGER.error("Login to Fritz!Box failed") <NEW_LINE> return <NEW_LINE> <DEDENT> self.data.is_online = True <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.data.update() <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> _LOGGER.error("Fritz!Box query failed, triggering relogin") <NEW_LINE> self.data.is_online = False
Representation of a FRITZ!DECT switch.
625990333eb6a72ae038b74f
class ZMQBaseComponent(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._context = zmq.Context() <NEW_LINE> self._loop = None <NEW_LINE> self.dataset = [] <NEW_LINE> <DEDENT> def _prepare_reactor(self): <NEW_LINE> <INDENT> raise NotImplementedError(self._prepare_reactor) <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> raise NotImplementedError(self.start) <NEW_LINE> <DEDENT> def stop(self): <NEW_LINE> <INDENT> raise NotImplementedError(self.stop)
Base class for zmq components.
62599033cad5886f8bdc58ef
class TestRsyncRemote(rsync_base.TestRsyncBase): <NEW_LINE> <INDENT> def test__init_directory_structure(self): <NEW_LINE> <INDENT> for remote in self.rsyncd[self.testname]: <NEW_LINE> <INDENT> url = remote.url + '/initial_test_path/test-subdir' <NEW_LINE> rsync = RsyncRemote(url, init_directory_structure=True) <NEW_LINE> self.assertTrue(os.path.isdir(remote.path)) <NEW_LINE> del rsync <NEW_LINE> <DEDENT> <DEDENT> def test_push(self): <NEW_LINE> <INDENT> for remote in self.rsyncd[self.testname]: <NEW_LINE> <INDENT> temp_dir = TempFiles() <NEW_LINE> self.getDataFile(os.path.join(temp_dir.last_temp_dir, 'dir1/dir2/dir3/test_data.txt')) <NEW_LINE> rsync = RsyncRemote(remote.url) <NEW_LINE> rsync.push(os.path.join(temp_dir.last_temp_dir, 'dir1')) <NEW_LINE> self.assertDirsEqual(remote.path, temp_dir.last_temp_dir)
Test case class for rsync_remote module
6259903350485f2cf55dc065
class ProportionalSelector(Selector): <NEW_LINE> <INDENT> def select(self, population): <NEW_LINE> <INDENT> n = len(population) <NEW_LINE> sum_fitness = sum(x.fitness for x in population) <NEW_LINE> probabilty = [(x.fitness/sum_fitness) for x in population] <NEW_LINE> return random.choice(range(n), size=self._size, replace=True, p=probabilty)
Selects using a probability distribution given by the normalized values of each genome's fitness
6259903315baa72349463082
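The record above normalizes each genome's fitness and samples indices with replacement; the use of size= and p= suggests numpy's random.choice rather than the standard-library one. A minimal standalone sketch of the same fitness-proportionate (roulette-wheel) selection, where the Genome class and the select helper are hypothetical stand-ins rather than the project's own API:

# Standalone sketch of roulette-wheel selection, mirroring ProportionalSelector.select.
# "Genome" is a hypothetical stand-in for anything exposing a numeric fitness.
import numpy as np
from dataclasses import dataclass

@dataclass
class Genome:
    name: str
    fitness: float

def select(population, size):
    total = sum(g.fitness for g in population)
    probabilities = [g.fitness / total for g in population]
    # Draw indices with replacement, weighted by normalized fitness.
    return np.random.choice(range(len(population)), size=size, replace=True, p=probabilities)

population = [Genome("a", 1.0), Genome("b", 3.0), Genome("c", 6.0)]
print(select(population, size=5))   # e.g. array([2, 2, 1, 2, 0])

Drawing with replacement means fitter genomes can be picked more than once, which matches replace=True in the record.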
class RemoveUTCTimestampTest(fixtures.TablesTest): <NEW_LINE> <INDENT> __only_on__ = 'mysql' <NEW_LINE> __backend__ = True <NEW_LINE> @classmethod <NEW_LINE> def define_tables(cls, metadata): <NEW_LINE> <INDENT> Table( 't', metadata, Column('id', Integer, primary_key=True), Column('x', Integer), Column('data', DateTime) ) <NEW_LINE> Table( 't_default', metadata, Column('id', Integer, primary_key=True), Column('x', Integer), Column('idata', DateTime, default=func.utc_timestamp()), Column('udata', DateTime, onupdate=func.utc_timestamp()) ) <NEW_LINE> <DEDENT> def test_insert_executemany(self): <NEW_LINE> <INDENT> with testing.db.connect() as conn: <NEW_LINE> <INDENT> conn.execute( self.tables.t.insert().values(data=func.utc_timestamp()), [{"x": 5}, {"x": 6}, {"x": 7}] ) <NEW_LINE> <DEDENT> <DEDENT> def test_update_executemany(self): <NEW_LINE> <INDENT> with testing.db.connect() as conn: <NEW_LINE> <INDENT> timestamp = datetime.datetime(2015, 4, 17, 18, 5, 2) <NEW_LINE> conn.execute( self.tables.t.insert(), [ {"x": 5, "data": timestamp}, {"x": 6, "data": timestamp}, {"x": 7, "data": timestamp}] ) <NEW_LINE> conn.execute( self.tables.t.update(). values(data=func.utc_timestamp()). where(self.tables.t.c.x == bindparam('xval')), [{"xval": 5}, {"xval": 6}, {"xval": 7}] ) <NEW_LINE> <DEDENT> <DEDENT> def test_insert_executemany_w_default(self): <NEW_LINE> <INDENT> with testing.db.connect() as conn: <NEW_LINE> <INDENT> conn.execute( self.tables.t_default.insert(), [{"x": 5}, {"x": 6}, {"x": 7}] ) <NEW_LINE> <DEDENT> <DEDENT> def test_update_executemany_w_default(self): <NEW_LINE> <INDENT> with testing.db.connect() as conn: <NEW_LINE> <INDENT> timestamp = datetime.datetime(2015, 4, 17, 18, 5, 2) <NEW_LINE> conn.execute( self.tables.t_default.insert(), [ {"x": 5, "idata": timestamp}, {"x": 6, "idata": timestamp}, {"x": 7, "idata": timestamp}] ) <NEW_LINE> conn.execute( self.tables.t_default.update(). values(idata=func.utc_timestamp()). where(self.tables.t_default.c.x == bindparam('xval')), [{"xval": 5}, {"xval": 6}, {"xval": 7}] )
This test exists because we removed the MySQL dialect's override of the UTC_TIMESTAMP() function, where the commit message for this feature stated that "it caused problems with executemany()". Since no example was provided, we are trying lots of combinations here. [ticket:3966]
6259903376d4e153a661dae5
class SignerReminderEmail(BaseMailerService): <NEW_LINE> <INDENT> name = 'Signer Reminder Email' <NEW_LINE> subject = '[ACTION REQUIRED] Invitation to sign a document' <NEW_LINE> email_template = 'sign_reminder'
m = SignerReminderEmail(recipients=(('Alex', '[email protected]'),), from_tuple=(from_user.get_full_name(), from_user.email,)) m.process(subject='[ACTION REQUIRED] Reminder to sign', action_url='http://lawpal.com/etc/')
6259903330c21e258be998f5
class YouBlockedUser(BadRequest): <NEW_LINE> <INDENT> ID = "YOU_BLOCKED_USER" <NEW_LINE> MESSAGE = __doc__
You blocked this user
62599033ec188e330fdf997d
class GoddessWallFrenchLanguage(BaseElement): <NEW_LINE> <INDENT> @property <NEW_LINE> def selector(self): <NEW_LINE> <INDENT> return (By.XPATH,"\\android.support.v7.widget.RecyclerView[@resource-id='com.videochat.livu:id/recycle_goddess']/android.widget.FrameLayout[2]/android.widget.TextView")
Goddess wall French language
62599033e76e3b2f99fd9af5
class ReporterContextQueue(ReporterContext): <NEW_LINE> <INDENT> def __init__(self, kind=None, verbosity=Reporter.DEFAULT, queue=None, prefix=None): <NEW_LINE> <INDENT> ReporterContext.__init__(self, kind, verbosity, None, prefix) <NEW_LINE> if queue is None: <NEW_LINE> <INDENT> queue = multiprocessing.Queue() <NEW_LINE> <DEDENT> self.queue = queue <NEW_LINE> self.closed = False <NEW_LINE> self._messages_pending = [] <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self._send_pending_messages() <NEW_LINE> self.closed = True <NEW_LINE> <DEDENT> def is_closed(self): <NEW_LINE> <INDENT> return self.closed <NEW_LINE> <DEDENT> def write(self, message): <NEW_LINE> <INDENT> self._messages_pending.append(message) <NEW_LINE> self._send_pending_messages() <NEW_LINE> <DEDENT> def _send_pending_messages(self): <NEW_LINE> <INDENT> for message in list(self._messages_pending): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.queue.put(self._messages_pending[0], block=False) <NEW_LINE> <DEDENT> except Queue.Full: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> self._messages_pending.pop(0)
A context for the reporter object. It has the following attributes: kind: The message kind to report to this context. (Reporter.KIND_ERR, Reporter.KIND_ERR or None.) verbosity: The verbosity of this context. queue: The multiprocessing.Queue. prefix: The default message prefix (str or callable).
625990338c3a8732951f7641
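ReporterContextQueue buffers messages locally and flushes them into a bounded queue without blocking, leaving unsent messages in the buffer when the queue is full. A small standalone sketch of that flush pattern using the standard library (in Python 3 the Full exception lives in the queue module; the record uses the Python 2 spelling Queue.Full):

# Standalone sketch of the "buffer locally, flush without blocking" pattern used by
# ReporterContextQueue._send_pending_messages.
import queue

def flush_pending(pending, q):
    # Move as many buffered messages as fit into the bounded queue; keep the rest.
    while pending:
        try:
            q.put(pending[0], block=False)
        except queue.Full:
            break
        pending.pop(0)

q = queue.Queue(maxsize=2)
pending = ["msg1", "msg2", "msg3"]
flush_pending(pending, q)
print(pending)   # ['msg3'] -- the queue only had room for two messages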
class SimplePlayer(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.position = maths.VECTOR3() <NEW_LINE> self.viewDirection = maths.VECTOR3() <NEW_LINE> self.teamId = 0 <NEW_LINE> self.address = 0x0 <NEW_LINE> <DEDENT> def initialise(self): <NEW_LINE> <INDENT> self.position = maths.VECTOR3() <NEW_LINE> self.viewDirection = maths.VECTOR3() <NEW_LINE> self.teamId = 0 <NEW_LINE> self.address = 0x0 <NEW_LINE> <DEDENT> def setPosition(self, posVec): <NEW_LINE> <INDENT> self.position.x = posVec.x <NEW_LINE> self.position.y = posVec.y <NEW_LINE> self.position.z = posVec.z <NEW_LINE> <DEDENT> def setPositionN(self, x, y, z): <NEW_LINE> <INDENT> self.position.x = x <NEW_LINE> self.position.y = y <NEW_LINE> self.position.z = z <NEW_LINE> <DEDENT> def setTeamId(self, teamId): <NEW_LINE> <INDENT> self.teamId = teamId <NEW_LINE> <DEDENT> def setAddress(self, address): <NEW_LINE> <INDENT> self.address = address
Holding the esp-friendly player data
625990338c3a8732951f7642
class CharacterClassViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = models.CharacterClass.objects.all() <NEW_LINE> serializer_class = serializers.CharacterClassSerializer <NEW_LINE> permission_classes = (permissions.IsAuthenticatedOrReadOnly,)
This viewset automatically provides the 'list', 'create', 'retrieve', 'update', and 'destroy' actions
62599033ac7a0e7691f735d2
class ApiFlowResultsRendererRegressionTest( api_test_lib.ApiCallRendererRegressionTest): <NEW_LINE> <INDENT> renderer = "ApiFlowResultsRenderer" <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> super(ApiFlowResultsRendererRegressionTest, self).setUp() <NEW_LINE> self.client_id = self.SetupClients(1)[0] <NEW_LINE> <DEDENT> def Run(self): <NEW_LINE> <INDENT> runner_args = flow_runner.FlowRunnerArgs(flow_name="GetFile") <NEW_LINE> flow_args = transfer.GetFileArgs( pathspec=rdf_paths.PathSpec( path="/tmp/evil.txt", pathtype=rdf_paths.PathSpec.PathType.OS)) <NEW_LINE> client_mock = test_lib.SampleHuntMock() <NEW_LINE> with test_lib.FakeTime(42): <NEW_LINE> <INDENT> flow_urn = flow.GRRFlow.StartFlow(client_id=self.client_id, args=flow_args, runner_args=runner_args, token=self.token) <NEW_LINE> for _ in test_lib.TestFlowHelper(flow_urn, client_mock=client_mock, client_id=self.client_id, token=self.token): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> self.Check("GET", "/api/clients/%s/flows/%s/results" % (self.client_id.Basename(), flow_urn.Basename()), replace={flow_urn.Basename(): "W:ABCDEF"})
Regression test for ApiFlowResultsRenderer.
625990339b70327d1c57fe6f
class WeightRecorder(object): <NEW_LINE> <INDENT> def __init__(self, sampling_interval, projection): <NEW_LINE> <INDENT> self.interval = sampling_interval <NEW_LINE> self.projection = projection <NEW_LINE> self._weights = [] <NEW_LINE> <DEDENT> def __call__(self, t): <NEW_LINE> <INDENT> self._weights.append(self.projection.get('weight', format='list', with_address=False)) <NEW_LINE> return t + self.interval <NEW_LINE> <DEDENT> def get_weights(self): <NEW_LINE> <INDENT> signal = neo.AnalogSignal(self._weights, units='nA', sampling_period=self.interval * ms, name="weight") <NEW_LINE> signal.channel_index = neo.ChannelIndex(np.arange(len(self._weights[0]))) <NEW_LINE> return signal
Recording of weights is not yet built into PyNN, so we need to construct a callback object which reads the current weights from the projection at regular intervals.
6259903330c21e258be998f7
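The WeightRecorder above follows PyNN's callback contract: the simulator calls the object at time t and the callback returns the next time it wants to be called (t + interval). A standalone sketch of that scheduling loop; DummyProjection and run_with_callback are illustrative stand-ins, not the PyNN API:

# Sketch of the callback scheduling contract WeightRecorder relies on: the simulator
# calls the callback at time t and the callback returns the next time it wants to run.
class DummyProjection:
    def __init__(self):
        self.weights = [0.5, 0.5, 0.5]
    def get(self, attribute, format, with_address):
        return list(self.weights)

class SimpleWeightRecorder:
    def __init__(self, sampling_interval, projection):
        self.interval = sampling_interval
        self.projection = projection
        self.samples = []
    def __call__(self, t):
        self.samples.append(self.projection.get('weight', format='list', with_address=False))
        return t + self.interval

def run_with_callback(t_stop, callback):
    t = 0.0
    next_call = 0.0
    while t <= t_stop:
        if t >= next_call:
            next_call = callback(t)
        t += 1.0   # 1 ms simulator step, for illustration

recorder = SimpleWeightRecorder(sampling_interval=5.0, projection=DummyProjection())
run_with_callback(20.0, recorder)
print(len(recorder.samples))   # 5 samples: t = 0, 5, 10, 15, 20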
class TestCreatingLargeObjects(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.c = connect() <NEW_LINE> self.c.query('begin') <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> self.c.query('rollback') <NEW_LINE> self.c.close() <NEW_LINE> <DEDENT> def assertIsLargeObject(self, obj): <NEW_LINE> <INDENT> self.assertIsNotNone(obj) <NEW_LINE> self.assertTrue(hasattr(obj, 'open')) <NEW_LINE> self.assertTrue(hasattr(obj, 'close')) <NEW_LINE> self.assertTrue(hasattr(obj, 'oid')) <NEW_LINE> self.assertTrue(hasattr(obj, 'pgcnx')) <NEW_LINE> self.assertTrue(hasattr(obj, 'error')) <NEW_LINE> self.assertIsInstance(obj.oid, int) <NEW_LINE> self.assertNotEqual(obj.oid, 0) <NEW_LINE> self.assertIs(obj.pgcnx, self.c) <NEW_LINE> self.assertIsInstance(obj.error, str) <NEW_LINE> self.assertFalse(obj.error) <NEW_LINE> <DEDENT> def testLoCreate(self): <NEW_LINE> <INDENT> large_object = self.c.locreate(pg.INV_READ | pg.INV_WRITE) <NEW_LINE> try: <NEW_LINE> <INDENT> self.assertIsLargeObject(large_object) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> del large_object <NEW_LINE> <DEDENT> <DEDENT> def testGetLo(self): <NEW_LINE> <INDENT> large_object = self.c.locreate(pg.INV_READ | pg.INV_WRITE) <NEW_LINE> try: <NEW_LINE> <INDENT> self.assertIsLargeObject(large_object) <NEW_LINE> oid = large_object.oid <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> del large_object <NEW_LINE> <DEDENT> data = b'some data to be shared' <NEW_LINE> large_object = self.c.getlo(oid) <NEW_LINE> try: <NEW_LINE> <INDENT> self.assertIsLargeObject(large_object) <NEW_LINE> self.assertEqual(large_object.oid, oid) <NEW_LINE> large_object.open(pg.INV_WRITE) <NEW_LINE> large_object.write(data) <NEW_LINE> large_object.close() <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> del large_object <NEW_LINE> <DEDENT> large_object = self.c.getlo(oid) <NEW_LINE> try: <NEW_LINE> <INDENT> self.assertIsLargeObject(large_object) <NEW_LINE> self.assertEqual(large_object.oid, oid) <NEW_LINE> large_object.open(pg.INV_READ) <NEW_LINE> r = large_object.read(80) <NEW_LINE> large_object.close() <NEW_LINE> large_object.unlink() <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> del large_object <NEW_LINE> <DEDENT> self.assertIsInstance(r, bytes) <NEW_LINE> self.assertEqual(r, data) <NEW_LINE> <DEDENT> def testLoImport(self): <NEW_LINE> <INDENT> if windows: <NEW_LINE> <INDENT> fname = 'temp_test_pg_largeobj_import.txt' <NEW_LINE> f = open(fname, 'wb') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> f = tempfile.NamedTemporaryFile() <NEW_LINE> fname = f.name <NEW_LINE> <DEDENT> data = b'some data to be imported' <NEW_LINE> f.write(data) <NEW_LINE> if windows: <NEW_LINE> <INDENT> f.close() <NEW_LINE> f = open(fname, 'rb') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> f.flush() <NEW_LINE> f.seek(0) <NEW_LINE> <DEDENT> large_object = self.c.loimport(f.name) <NEW_LINE> try: <NEW_LINE> <INDENT> f.close() <NEW_LINE> if windows: <NEW_LINE> <INDENT> os.remove(fname) <NEW_LINE> <DEDENT> self.assertIsLargeObject(large_object) <NEW_LINE> large_object.open(pg.INV_READ) <NEW_LINE> large_object.seek(0, pg.SEEK_SET) <NEW_LINE> r = large_object.size() <NEW_LINE> self.assertIsInstance(r, int) <NEW_LINE> self.assertEqual(r, len(data)) <NEW_LINE> r = large_object.read(80) <NEW_LINE> self.assertIsInstance(r, bytes) <NEW_LINE> self.assertEqual(r, data) <NEW_LINE> large_object.close() <NEW_LINE> large_object.unlink() <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> del large_object
Test creating large objects using a connection.
62599033a8ecb03325872308
class TbSetmealfoodmappSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = TbSetmealfoodmapp <NEW_LINE> fields = ( 'mealfoodmapid', 'temp_commonfoodid', 'temp_setmealinfoid', 'mealfoodmapremarks', )
Mapping table between set meals and foods
62599033ec188e330fdf997f
class Account(ndb.Model): <NEW_LINE> <INDENT> email = ndb.StringProperty() <NEW_LINE> nickname = ndb.StringProperty() <NEW_LINE> registered = ndb.BooleanProperty() <NEW_LINE> permissions = ndb.IntegerProperty(repeated=True) <NEW_LINE> shadow_banned = ndb.BooleanProperty(default=False) <NEW_LINE> created = ndb.DateTimeProperty(auto_now_add=True) <NEW_LINE> updated = ndb.DateTimeProperty(auto_now=True, indexed=False) <NEW_LINE> display_name = ndb.StringProperty()
Accounts represent accounts people use on TBA.
625990338c3a8732951f7643
class TableMapEvent(BinLogEvent): <NEW_LINE> <INDENT> def __init__(self, from_packet, event_size, table_map, ctl_connection, **kwargs): <NEW_LINE> <INDENT> super().__init__(from_packet, event_size, table_map, ctl_connection, **kwargs) <NEW_LINE> self._only_tables = kwargs["only_tables"] <NEW_LINE> self._only_schemas = kwargs["only_schemas"] <NEW_LINE> self._freeze_schema = kwargs["freeze_schema"] <NEW_LINE> self.table_id = self._read_table_id() <NEW_LINE> if self.table_id in table_map and self._freeze_schema: <NEW_LINE> <INDENT> self._processed = False <NEW_LINE> return <NEW_LINE> <DEDENT> self.flags = struct.unpack('<H', self.packet.read(2))[0] <NEW_LINE> self.schema_length = byte2int(self.packet.read(1)) <NEW_LINE> self.schema = self.packet.read(self.schema_length).decode() <NEW_LINE> self.packet.advance(1) <NEW_LINE> self.table_length = byte2int(self.packet.read(1)) <NEW_LINE> self.table = self.packet.read(self.table_length).decode() <NEW_LINE> if (self._only_tables is not None and self.table not in self._only_tables): <NEW_LINE> <INDENT> self._processed = False <NEW_LINE> return <NEW_LINE> <DEDENT> if self._only_schemas is not None and self.schema not in self._only_schemas: <NEW_LINE> <INDENT> self._processed = False <NEW_LINE> return <NEW_LINE> <DEDENT> self.packet.advance(1) <NEW_LINE> self.column_count = self.packet.read_length_coded_binary() <NEW_LINE> self.columns = [] <NEW_LINE> self._table_map = table_map <NEW_LINE> self._from_packet = from_packet <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def load_table_schema(self): <NEW_LINE> <INDENT> if self.table_id in self._table_map: <NEW_LINE> <INDENT> self.column_schemas = self.table_map[self.table_id].column_schemas <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> tbl_info = self._ctl_connection._get_table_information <NEW_LINE> self.column_schemas = yield from tbl_info(self.schema, self.table) <NEW_LINE> <DEDENT> column_types = list(self.packet.read(self.column_count)) <NEW_LINE> self.packet.read_length_coded_binary() <NEW_LINE> for i in range(0, len(column_types)): <NEW_LINE> <INDENT> column_type = column_types[i] <NEW_LINE> column_schema = self.column_schemas[i] <NEW_LINE> col = Column(byte2int(column_type), column_schema, self._from_packet) <NEW_LINE> self.columns.append(col) <NEW_LINE> <DEDENT> self.table_obj = Table(self.column_schemas, self.table_id, self.schema, self.table, self.columns) <NEW_LINE> <DEDENT> def get_table(self): <NEW_LINE> <INDENT> return self.table_obj <NEW_LINE> <DEDENT> def _dump(self): <NEW_LINE> <INDENT> super(TableMapEvent, self)._dump() <NEW_LINE> print("Table id: %d" % (self.table_id)) <NEW_LINE> print("Schema: %s" % (self.schema)) <NEW_LINE> print("Table: %s" % (self.table)) <NEW_LINE> print("Columns: %s" % (self.column_count))
This event describes the structure of a table. It is sent before a change happens on a table. An end user of the lib should have no use for this
625990336fece00bbaccca9b
class HungarianMatcher(nn.Module): <NEW_LINE> <INDENT> def __init__( self, cost_class: float = 1, cost_bbox: float = 1, cost_giou: float = 1, use_focal_loss=False, ): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.cost_class = cost_class <NEW_LINE> self.cost_bbox = cost_bbox <NEW_LINE> self.cost_giou = cost_giou <NEW_LINE> assert ( cost_class != 0 or cost_bbox != 0 or cost_giou != 0 ), "all costs cant be 0" <NEW_LINE> self.use_focal_loss = use_focal_loss <NEW_LINE> <DEDENT> @torch.no_grad() <NEW_LINE> def forward(self, outputs, targets): <NEW_LINE> <INDENT> bs, num_queries = outputs["pred_logits"].shape[:2] <NEW_LINE> if self.use_focal_loss: <NEW_LINE> <INDENT> out_prob = outputs["pred_logits"].flatten(0, 1).sigmoid() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> out_prob = ( outputs["pred_logits"].flatten(0, 1).softmax(-1) ) <NEW_LINE> <DEDENT> out_bbox = outputs["pred_boxes"].flatten(0, 1) <NEW_LINE> tgt_ids = torch.cat([v["labels"] for v in targets]) <NEW_LINE> tgt_bbox = torch.cat([v["boxes"] for v in targets]) <NEW_LINE> if self.use_focal_loss: <NEW_LINE> <INDENT> alpha = 0.25 <NEW_LINE> gamma = 2.0 <NEW_LINE> neg_cost_class = ( (1 - alpha) * (out_prob ** gamma) * (-(1 - out_prob + 1e-8).log()) ) <NEW_LINE> pos_cost_class = ( alpha * ((1 - out_prob) ** gamma) * (-(out_prob + 1e-8).log()) ) <NEW_LINE> cost_class = pos_cost_class[:, tgt_ids] - neg_cost_class[:, tgt_ids] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> cost_class = -out_prob[ :, tgt_ids ] <NEW_LINE> <DEDENT> cost_bbox = torch.cdist( out_bbox, tgt_bbox, p=1 ) <NEW_LINE> cost_giou = -generalized_box_iou( box_cxcywh_to_xyxy(out_bbox), box_cxcywh_to_xyxy(tgt_bbox) ) <NEW_LINE> C = ( self.cost_bbox * cost_bbox + self.cost_class * cost_class + self.cost_giou * cost_giou ) <NEW_LINE> C = C.view( bs, num_queries, -1 ).cpu() <NEW_LINE> sizes = [len(v["boxes"]) for v in targets] <NEW_LINE> indices = [ linear_sum_assignment(c[i]) for i, c in enumerate(C.split(sizes, -1)) ] <NEW_LINE> return [ ( torch.as_tensor(i, dtype=torch.int64), torch.as_tensor(j, dtype=torch.int64), ) for i, j in indices ]
This class computes an assignment between the targets and the predictions of the network For efficiency reasons, the targets don't include the no_object. Because of this, in general, there are more predictions than targets. In this case, we do a 1-to-1 matching of the best predictions, while the others are un-matched (and thus treated as non-objects).
62599033d53ae8145f91954f
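Stripped of the focal-loss and box-format details, the matcher boils down to building a (queries x targets) cost matrix as a weighted sum of class, L1-box and GIoU costs and solving a linear assignment per image. A self-contained illustration of just that core step with scipy; the cost values below are made up:

# The heart of HungarianMatcher: build a cost matrix (queries x targets) and solve the
# assignment with scipy's Hungarian algorithm. Numbers are illustrative only.
import numpy as np
from scipy.optimize import linear_sum_assignment

cost_class = np.array([[0.1, 0.9], [0.8, 0.2], [0.5, 0.6]])   # 3 queries, 2 targets
cost_bbox  = np.array([[0.2, 1.5], [1.4, 0.3], [0.9, 0.8]])
cost_giou  = np.array([[-0.7, 0.1], [0.2, -0.6], [0.0, 0.0]])

C = 1.0 * cost_bbox + 1.0 * cost_class + 1.0 * cost_giou
row_ind, col_ind = linear_sum_assignment(C)
print([(int(r), int(c)) for r, c in zip(row_ind, col_ind)])   # [(0, 0), (1, 1)]: query 0 -> target 0, query 1 -> target 1

Because there are more queries than targets, the unmatched query (index 2 here) is simply left unassigned, which is exactly the "treated as non-objects" behaviour described in the docstring.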
class DBStorage: <NEW_LINE> <INDENT> __engine = None <NEW_LINE> __session = None <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.__engine = create_engine( 'mysql+mysqldb://{}:{}@{}/{}'.format( os.environ.get('HBNB_MYSQL_USER'), os.environ.get('HBNB_MYSQL_PWD'), os.environ.get('HBNB_MYSQL_HOST'), os.environ.get('HBNB_MYSQL_DB'))) <NEW_LINE> if os.environ.get('HBNB_ENV') == 'test': <NEW_LINE> <INDENT> Base.metadata.drop_all(self.__engine) <NEW_LINE> <DEDENT> <DEDENT> def all(self, cls=None): <NEW_LINE> <INDENT> dict = {} <NEW_LINE> if cls is None: <NEW_LINE> <INDENT> objects = self.__session.query().all() <NEW_LINE> print('OBJ', objects) <NEW_LINE> for item in objects: <NEW_LINE> <INDENT> k = item.__class__.__name__ + "." + item.id <NEW_LINE> dict[k] = item <NEW_LINE> <DEDENT> return dict <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> objects = self.__session.query(cls).all() <NEW_LINE> for item in objects: <NEW_LINE> <INDENT> k = item.__class__.__name__ + "." + item.id <NEW_LINE> dict[k] = item <NEW_LINE> <DEDENT> return dict <NEW_LINE> <DEDENT> <DEDENT> def new(self, obj): <NEW_LINE> <INDENT> self.__session.add(obj) <NEW_LINE> <DEDENT> def save(self): <NEW_LINE> <INDENT> self.__session.commit() <NEW_LINE> <DEDENT> def delete(self, obj=None): <NEW_LINE> <INDENT> if obj is not None: <NEW_LINE> <INDENT> self.__session.delete(obj) <NEW_LINE> <DEDENT> <DEDENT> def reload(self): <NEW_LINE> <INDENT> Base.metadata.create_all(self.__engine) <NEW_LINE> a = sessionmaker(bind=self.__engine, expire_on_commit=False) <NEW_LINE> Session = scoped_session(a) <NEW_LINE> self.__session = Session() <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self.__session.remove()
Database storage of class instances, analogous to the one provided by FileStorage
6259903330c21e258be998f8
class Meta: <NEW_LINE> <INDENT> model = Question <NEW_LINE> fields = ('url', 'question_text', 'pub_date')
Uses the ModelSerializer class to refactor the serializer
62599033287bf620b6272cd3
class GRIDB(BaseCard): <NEW_LINE> <INDENT> type = 'GRIDB' <NEW_LINE> _field_map = {1: 'nid', 4:'phi', 6:'cd', 7:'ps', 8:'idf'} <NEW_LINE> def __init__(self, nid, phi, cd, ps, idf, comment=''): <NEW_LINE> <INDENT> if comment: <NEW_LINE> <INDENT> self.comment = comment <NEW_LINE> <DEDENT> self.nid = nid <NEW_LINE> self.phi = phi <NEW_LINE> self.cd = cd <NEW_LINE> self.ps = ps <NEW_LINE> self.idf = idf <NEW_LINE> assert self.nid > 0, 'nid=%s' % self.nid <NEW_LINE> assert self.phi >= 0, 'phi=%s' % self.phi <NEW_LINE> assert self.cd >= 0, 'cd=%s' % self.cd <NEW_LINE> assert self.ps >= 0, 'ps=%s' % self.ps <NEW_LINE> assert self.idf >= 0, 'idf=%s' % self.idf <NEW_LINE> self.cd_ref = None <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def add_card(cls, card, comment=''): <NEW_LINE> <INDENT> nid = integer(card, 1, 'nid') <NEW_LINE> phi = double(card, 4, 'phi') <NEW_LINE> cd = integer(card, 6, 'cd') <NEW_LINE> ps = integer(card, 7, 'ps') <NEW_LINE> idf = integer(card, 8, 'idf') <NEW_LINE> return GRIDB(nid, phi, cd, ps, idf, comment=comment) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def add_op2_data(cls, data, comment=''): <NEW_LINE> <INDENT> nid = data[0] <NEW_LINE> phi = data[1] <NEW_LINE> cd = data[2] <NEW_LINE> ps = data[3] <NEW_LINE> idf = data[4] <NEW_LINE> return GRIDB(nid, phi, cd, ps, idf, comment=comment) <NEW_LINE> <DEDENT> def _verify(self, xref): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def Cd(self): <NEW_LINE> <INDENT> if self.cd_ref is None: <NEW_LINE> <INDENT> return self.cd <NEW_LINE> <DEDENT> return self.cd_ref.cid <NEW_LINE> <DEDENT> def raw_fields(self): <NEW_LINE> <INDENT> list_fields = ['GRIDB', self.nid, None, None, self.phi, None, self.Cd(), self.ps, self.idf] <NEW_LINE> return list_fields <NEW_LINE> <DEDENT> def repr_fields(self): <NEW_LINE> <INDENT> cd = set_blank_if_default(self.Cd(), 0) <NEW_LINE> ps = set_blank_if_default(self.ps, 0) <NEW_LINE> idf = set_blank_if_default(self.idf, 0) <NEW_LINE> list_fields = ['GRIDB', self.nid, None, None, self.phi, None, cd, ps, idf] <NEW_LINE> return list_fields <NEW_LINE> <DEDENT> def write_card(self, size=8, is_double=False): <NEW_LINE> <INDENT> card = self.repr_fields() <NEW_LINE> if size == 8: <NEW_LINE> <INDENT> return self.comment + print_card_8(card) <NEW_LINE> <DEDENT> return self.comment + print_card_16(card)
defines the GRIDB class
6259903321bff66bcd723d51
class Exam(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=30) <NEW_LINE> num_questions = models.IntegerField() <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name
Table containing the list of all exams
6259903350485f2cf55dc069
class IdentityMediumConfig(object): <NEW_LINE> <INDENT> init_scale = 0.05 <NEW_LINE> learning_rate = 0.8 <NEW_LINE> max_grad_norm = 5 <NEW_LINE> num_layers = 2 <NEW_LINE> num_steps = 35 <NEW_LINE> hidden_size = 650 <NEW_LINE> max_epoch = 6 <NEW_LINE> max_max_epoch = 39 <NEW_LINE> keep_prob = 0.5 <NEW_LINE> lr_decay = 0.8 <NEW_LINE> batch_size = 20 <NEW_LINE> vocab_size = 10000 <NEW_LINE> use_tanh = False
Identity Medium config.
62599033a4f1c619b294f6e3
class StringEditingTests(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.myString = MyString() <NEW_LINE> <DEDENT> def test_reverse(self): <NEW_LINE> <INDENT> tests = ["Hello World!", (1,2,3,4), [9,8,7,6,5,4,3,2,1]] <NEW_LINE> for i in tests: <NEW_LINE> <INDENT> orig = i <NEW_LINE> obj_reversed= reversed(orig) <NEW_LINE> if isinstance(i, str): <NEW_LINE> <INDENT> expected = "".join(obj_reversed) <NEW_LINE> <DEDENT> if isinstance(i, list): <NEW_LINE> <INDENT> expected = list(obj_reversed) <NEW_LINE> <DEDENT> if isinstance(i, tuple): <NEW_LINE> <INDENT> expected = tuple(obj_reversed) <NEW_LINE> <DEDENT> output = self.myString.reverse(orig) <NEW_LINE> self.assertEquals(output, expected, "{0} expected, but got {1}".format(expected, output)) <NEW_LINE> <DEDENT> <DEDENT> def test_count_vowels(self): <NEW_LINE> <INDENT> some_string = "testing one two three" <NEW_LINE> no_vowels = 7 <NEW_LINE> actual = self.myString.count_vowels(some_string) <NEW_LINE> self.assertEqual(actual, no_vowels, "{0} expected, but got {1}".format(no_vowels, actual)) <NEW_LINE> <DEDENT> def test_is_palindrome(self): <NEW_LINE> <INDENT> for phrase in ["mum mum", "racecar"]: <NEW_LINE> <INDENT> self.assertTrue(self.myString.is_palindrome(phrase)) <NEW_LINE> <DEDENT> self.assertFalse(self.myString.is_palindrome("not_a_palindrome")) <NEW_LINE> <DEDENT> def test_count_words_string(self): <NEW_LINE> <INDENT> string = "This is a string of some length that I will work out by using a function" <NEW_LINE> words = string.split() <NEW_LINE> word_count = len(words) <NEW_LINE> self.assertEqual(self.myString.count_words(string=string), word_count) <NEW_LINE> file = "test_resources/testing_file.txt" <NEW_LINE> with open(file) as f: <NEW_LINE> <INDENT> string = f.read() <NEW_LINE> words = string.split() <NEW_LINE> word_count = len(words) <NEW_LINE> self.assertEqual(self.myString.count_words(file=file), word_count)
StringEditingTests exercises MyString, which contains a bunch of methods to edit and play with strings
62599033ac7a0e7691f735d4
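The tests above exercise a MyString helper that is not part of this record. A hypothetical minimal implementation consistent with the assertions (reverse preserves the input type; count_vowels, is_palindrome, and count_words over a string or a file):

# Hypothetical MyString consistent with the assertions in StringEditingTests;
# the real class under test is not included in this record.
class MyString:
    def reverse(self, obj):
        # Preserve the input's type (str, list or tuple) while reversing it.
        if isinstance(obj, str):
            return "".join(reversed(obj))
        if isinstance(obj, tuple):
            return tuple(reversed(obj))
        return list(reversed(obj))

    def count_vowels(self, text):
        return sum(1 for ch in text.lower() if ch in "aeiou")

    def is_palindrome(self, text):
        return text == text[::-1]

    def count_words(self, string=None, file=None):
        if file is not None:
            with open(file) as f:
                string = f.read()
        return len(string.split())

m = MyString()
print(m.reverse("Hello"), m.count_vowels("testing one two three"), m.is_palindrome("racecar"))
# olleH 7 True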
class Sleep(_State): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def set(cls, user): <NEW_LINE> <INDENT> user.messages = "" <NEW_LINE> super().set(user) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _jump(cls, user, user_msg): <NEW_LINE> <INDENT> if user_msg.startswith('/start'): <NEW_LINE> <INDENT> change_state(Waiting, user) <NEW_LINE> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False
Zzzz state: waiting for user actions. Sleep -/start-> Waiting
6259903323e79379d538d5f7
class DbFind: <NEW_LINE> <INDENT> def __init__(self,editor): <NEW_LINE> <INDENT> if 0: <NEW_LINE> <INDENT> assert isinstance(editor,Scintilla) <NEW_LINE> <DEDENT> self.editor = editor <NEW_LINE> editor.setSelBack(QColor('blue')) <NEW_LINE> editor.setSelFore(QColor('white')) <NEW_LINE> <DEDENT> def doubleClick(self, pos, line): <NEW_LINE> <INDENT> oldrange = self.editor.getMainSelection() <NEW_LINE> text = self.editor.textRange(*oldrange) <NEW_LINE> if text: <NEW_LINE> <INDENT> self.hight_text(text) <NEW_LINE> self.editor.addSelection(*oldrange) <NEW_LINE> <DEDENT> <DEDENT> def hight_text(self ,text ): <NEW_LINE> <INDENT> pt = b'(?<=\\W)('+text+b')((?=\\W)|$)' <NEW_LINE> for i in re.finditer(pt, self.editor.text()): <NEW_LINE> <INDENT> self.editor.addSelection(i.start(1),i.end(1))
On double-click, search for identical items. editor.setExtraHight()
6259903373bcbd0ca4bcb374
class AttachmentException(BugsyException): <NEW_LINE> <INDENT> pass
If we try to do something that is not allowed on an attachment, then this error is raised
62599033287bf620b6272cd5
class Ircrr(callbacks.Plugin): <NEW_LINE> <INDENT> threaded = True <NEW_LINE> class rr(callbacks.Commands): <NEW_LINE> <INDENT> zone_id = conf.supybot.plugins.Ircrr.rr.zone_id() <NEW_LINE> zone_name = conf.supybot.plugins.Ircrr.rr.zone() <NEW_LINE> pattern = re.compile(r"\b(\w+)\s*:\s*([^\s]+)") <NEW_LINE> def add(self, irc, msg, args, subdomain, rtype, content): <NEW_LINE> <INDENT> cf_send = get_cf() <NEW_LINE> zone = self.zone_name <NEW_LINE> name = subdomain+'.'+zone <NEW_LINE> if rtype.upper() not in ('A','AAAA','CNAME'): <NEW_LINE> <INDENT> irc.error('Invalid Round Robin record type.', prefixNick=False) <NEW_LINE> <DEDENT> body = {'name': name, 'content': content, 'type': rtype} <NEW_LINE> response = cf_send.zones.dns_records.post(self.zone_id, data = body) <NEW_LINE> response = response.get('result') <NEW_LINE> if response: <NEW_LINE> <INDENT> irc.reply("Record added, Name: %(name)s Content: %(content)s Type: %(type)" % response, prefixNick=False) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> irc.error("Failure", prefixNick=False) <NEW_LINE> <DEDENT> <DEDENT> add = wrap(add, ['admin', 'something', 'something', 'something']) <NEW_LINE> def rem(self, irc, msg, args, record_id): <NEW_LINE> <INDENT> cf_send = get_cf() <NEW_LINE> zone = self.zone_id <NEW_LINE> response = cf_send.zones.dns_records.delete(zone, record_id) <NEW_LINE> try: <NEW_LINE> <INDENT> id = response['result']['id'] <NEW_LINE> irc.reply('Record ID %s removed' % id, prefixNick=False) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> rem = wrap(rem, ['admin', 'something']) <NEW_LINE> def get(self, irc, msg, args, subdomain, extra): <NEW_LINE> <INDENT> cf_send = get_cf() <NEW_LINE> zone = self.zone_id <NEW_LINE> zone_name = self.zone_name <NEW_LINE> body = {'name': subdomain+'.'+zone_name} <NEW_LINE> if extra != None: <NEW_LINE> <INDENT> options = dict(self.pattern.findall(extra)) <NEW_LINE> body.update(options) <NEW_LINE> <DEDENT> dns_records = cf_send.zones.dns_records.get(zone, params = body) <NEW_LINE> dns_records = dns_records.get('result') <NEW_LINE> irc.replies(['%(id)s / %(name)s / %(type)s / %(content)s' % dns_record for dns_record in dns_records], prefixNick=False) <NEW_LINE> <DEDENT> get = wrap(get, ['admin', 'something', optional('text')])
Allows access to the Cloudflare (tm) API to manage Round Robins
62599033287bf620b6272cd6
class DisplayError(Exception): <NEW_LINE> <INDENT> pass
An error that prevents the display from being done correctly.
6259903350485f2cf55dc06b
class SignAuthorizedResource(Resource): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(*args, **kwargs) <NEW_LINE> <DEDENT> method_decorators = [query_authorized, handle_db_exception_inner]
The resource uses the signature in the query to ensure the request is made by an authorized client.
62599033d10714528d69ef02
class NGImageNoteDetail(generics.RetrieveAPIView): <NEW_LINE> <INDENT> serializer_class = NGImageNoteSerializer <NEW_LINE> permission_classes = (permissions.IsAuthenticated,) <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> user = self.request.user <NEW_LINE> return ImageNote.objects.filter(owner=user)
A RESTful detail view of an ImageNote, without the coordinate data
6259903376d4e153a661dae8
class LoginTestCase(unittest.TestCase): <NEW_LINE> <INDENT> pass
def setUp(self) -> None: def test_login_success(self): self.assertTrue() def test_login_fail(self): self.assertTrue() def test_password_expired(self): self.assertTrue() def test_password_reset_success(self): self.assertTrue() def test_password_reset_fail(self): self.assertTrue()
6259903330c21e258be998fb
class UnaryOperationMutator(AbstractMutator): <NEW_LINE> <INDENT> def __init__(self, identifier, operation_type: UnaryOperation): <NEW_LINE> <INDENT> super().__init__(identifier, MutatorType.UNARY_OP) <NEW_LINE> self._operation_type = operation_type <NEW_LINE> <DEDENT> @property <NEW_LINE> def operation_type(self): <NEW_LINE> <INDENT> return self._operation_type <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'UNARY(id={}, field={}, op={})'.format(self.identifier, self.field, self.operation_type) <NEW_LINE> <DEDENT> def compile(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def mutate(self, message: AbstractMessage): <NEW_LINE> <INDENT> field_value = message.message_get_field(self.field) <NEW_LINE> if field_value is not None: <NEW_LINE> <INDENT> if self.operation_type is UnaryOperation.UNARY_INVERT: <NEW_LINE> <INDENT> mutated_value = UnaryOperation.invert(field_value) <NEW_LINE> <DEDENT> elif self.operation_type is UnaryOperation.UNARY_INCR: <NEW_LINE> <INDENT> mutated_value = UnaryOperation.increment(field_value) <NEW_LINE> <DEDENT> elif self.operation_type is UnaryOperation.UNARY_DECR: <NEW_LINE> <INDENT> mutated_value = UnaryOperation.decrement(field_value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise Exception('UnaryMutator does not support operation {}'.format(self.operation_type)) <NEW_LINE> <DEDENT> message.message_set_field(self.field, mutated_value) <NEW_LINE> return '{}({})={}->{}'.format(self._operation_type, self.field, field_value, mutated_value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise Exception('Message {} has no field "{}"'.format(message.message, self.field))
A simple UNARY_OP (e.g., NOT, INC, DEC) mutator does the actual work of altering the message
6259903307d97122c4217d96
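The mutator delegates to UnaryOperation.invert, increment and decrement, which are not shown in this record. A hypothetical sketch of byte-level helpers that would satisfy those calls for integer field values; the fixed-width handling is an assumption, not the fuzzer's actual implementation:

# Hypothetical unary helpers matching the calls made by UnaryOperationMutator.mutate;
# the real UnaryOperation enum is not included in this record.
from enum import Enum

class UnaryOperation(Enum):
    UNARY_INVERT = 0
    UNARY_INCR = 1
    UNARY_DECR = 2

    @staticmethod
    def invert(value, width=8):
        return ~value & ((1 << width) - 1)   # bitwise NOT within a fixed width

    @staticmethod
    def increment(value, width=8):
        return (value + 1) & ((1 << width) - 1)

    @staticmethod
    def decrement(value, width=8):
        return (value - 1) & ((1 << width) - 1)

print(UnaryOperation.invert(0x0F), UnaryOperation.increment(0xFF), UnaryOperation.decrement(0x00))
# 240 0 255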
class RedirectError(HttxException): <NEW_LINE> <INDENT> def __init__(self, response, *args): <NEW_LINE> <INDENT> HttxException.__init__(self, response, *args)
A class representing a redirection error (like missing location header, not allowed in a POST request and others)
62599033d99f1b3c44d06792
class size_t_t( _D.fundamental_t ): <NEW_LINE> <INDENT> CPPNAME = 'size_t' <NEW_LINE> def __init__( self ): <NEW_LINE> <INDENT> _D.fundamental_t.__init__( self, size_t_t.CPPNAME )
represents size_t type
62599033796e427e5384f86b
class State: <NEW_LINE> <INDENT> def __init__(self, json_filename, collab=None): <NEW_LINE> <INDENT> with open(json_filename) as f: <NEW_LINE> <INDENT> data = json.load(f) <NEW_LINE> <DEDENT> if collab: <NEW_LINE> <INDENT> self._authors = [] <NEW_LINE> for author in data['authors']: <NEW_LINE> <INDENT> if 'collab' in author and author['collab'] != collab: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> self._authors.append(author) <NEW_LINE> <DEDENT> self._institutions = {} <NEW_LINE> for name in data['institutions']: <NEW_LINE> <INDENT> inst = data['institutions'][name] <NEW_LINE> if 'collabs' in inst and collab not in inst['collabs']: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> self._institutions[name] = inst <NEW_LINE> <DEDENT> self._thanks = data['thanks'] <NEW_LINE> self._acknowledgements = data['acknowledgements'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._authors = data['authors'] <NEW_LINE> self._institutions = data['institutions'] <NEW_LINE> self._thanks = data['thanks'] <NEW_LINE> self._acknowledgements = data['acknowledgements'] <NEW_LINE> <DEDENT> <DEDENT> def authors(self, date, legacy=False): <NEW_LINE> <INDENT> ret = [] <NEW_LINE> for author in self._authors: <NEW_LINE> <INDENT> if author['from'] <= date and (author['to'] >= date or not author['to']): <NEW_LINE> <INDENT> if legacy or not author.get('legacy', False): <NEW_LINE> <INDENT> ret.append(author) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return ret <NEW_LINE> <DEDENT> def institutions(self, date, **kwargs): <NEW_LINE> <INDENT> insts = {} <NEW_LINE> for a in itertools.chain(self.authors(date, **kwargs)): <NEW_LINE> <INDENT> if 'instnames' in a and a['instnames']: <NEW_LINE> <INDENT> for inst in a['instnames']: <NEW_LINE> <INDENT> insts[inst] = self._institutions[inst] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return insts <NEW_LINE> <DEDENT> def thanks(self, date, **kwargs): <NEW_LINE> <INDENT> thanks = {} <NEW_LINE> for a in itertools.chain(self.authors(date, **kwargs)): <NEW_LINE> <INDENT> if 'thanks' in a and a['thanks']: <NEW_LINE> <INDENT> for t in a['thanks']: <NEW_LINE> <INDENT> thanks[t] = self._thanks[t] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return thanks <NEW_LINE> <DEDENT> def acknowledgements(self, date): <NEW_LINE> <INDENT> ret = [] <NEW_LINE> for ack in self._acknowledgements: <NEW_LINE> <INDENT> if ack['from'] <= date and (ack['to'] >= date or not ack['to']): <NEW_LINE> <INDENT> ret.append(ack['value']) <NEW_LINE> <DEDENT> <DEDENT> if ret and ret[-1][-1] == ';': <NEW_LINE> <INDENT> ret[-1] = ret[-1][:-1]+'.' <NEW_LINE> <DEDENT> return ret
The authorlist state. Args: json_filename (str): name of json file holding state collab (str): (optional) name of collaboration to filter by
625990331d351010ab8f4c08
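Authors and acknowledgements are kept when the given date falls inside their [from, to] window, with an empty 'to' meaning open-ended. A tiny standalone illustration of that predicate; the sample entries are made up, and ISO-formatted date strings compare correctly as plain strings:

# Standalone illustration of the date-window filter used by State.authors() and
# State.acknowledgements(); the sample entries below are made up.
def active_on(entry, date):
    return entry['from'] <= date and (entry['to'] >= date or not entry['to'])

authors = [
    {'name': 'A. Example', 'from': '2015-01-01', 'to': ''},            # still active
    {'name': 'B. Example', 'from': '2010-01-01', 'to': '2014-06-30'},  # left in 2014
]
print([a['name'] for a in authors if active_on(a, '2016-03-01')])   # ['A. Example']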
class A: <NEW_LINE> <INDENT> def __init__(): <NEW_LINE> <INDENT> pass
Hello and goodbye
62599033b830903b9686ecf2
class drawobj_empty(drawobj_base): <NEW_LINE> <INDENT> def __init__( self, parent=None ): <NEW_LINE> <INDENT> super().__init__(parent=parent) <NEW_LINE> <DEDENT> def get_xx_yy(self): <NEW_LINE> <INDENT> drawobj_base.debug_logger.info("drawobj_empty.get_xx_yy") <NEW_LINE> return [0.0], [0.0] <NEW_LINE> <DEDENT> def set_data(self): <NEW_LINE> <INDENT> return
A drawing object that acts as a container and draws nothing
62599033cad5886f8bdc58f3
class Modify_State_Delete(basic.SimpleProtocol): <NEW_LINE> <INDENT> def runTest(self): <NEW_LINE> <INDENT> of_logger.info("Running Modify_State_Delete test") <NEW_LINE> of_ports = of_port_map.keys() <NEW_LINE> of_ports.sort() <NEW_LINE> rc = delete_all_flows(self.controller,of_logger) <NEW_LINE> self.assertEqual(rc, 0, "Failed to delete all flows") <NEW_LINE> of_logger.info("Inserting a flow entry and then deleting it") <NEW_LINE> of_logger.info("Expecting the active_count=0 in table_stats_reply") <NEW_LINE> (Pkt,match) = Wildcard_All_Except_Ingress(self,of_ports) <NEW_LINE> Verify_TableStats(self,active_entries=1) <NEW_LINE> NonStrict_Delete(self,match) <NEW_LINE> Verify_TableStats(self,active_entries=0)
Check Basic Flow Delete request is implemented a) Send OFPT_FLOW_MOD, command = OFPFC_ADD b) Send ofp_table_stats request , verify active_count=1 in reply c) Send OFPT_FLOW_MOD, command = OFPFC_DELETE c) Send ofp_table_stats request , verify active_count=0 in reply
62599033d4950a0f3b1116b6
class MywVMEEntry(MywEntry): <NEW_LINE> <INDENT> def __init__(self, master, label='label',vmeaddr='VMEADDR',vb=None, side='left', helptext="", width=None, userupdate=None): <NEW_LINE> <INDENT> self.vb= vb <NEW_LINE> self.userupdate= userupdate <NEW_LINE> self.vmeaddr= vmeaddr <NEW_LINE> self.getvme() <NEW_LINE> MywEntry.__init__(self, master, label=label, defvalue=self.vmeval, side=side, helptext=helptext, cmdlabel=self.updateentry, width=width, textvariable=None, delaction=None,defaultinx=0,name=None) <NEW_LINE> self.entry.bind("<Leave>", self.updateentry) <NEW_LINE> self.entry.bind("<Key-Return>", self.updateentry) <NEW_LINE> <DEDENT> def getvme(self): <NEW_LINE> <INDENT> if self.vb: <NEW_LINE> <INDENT> self.vmeval= self.vb.io.execute("vmeopr32("+self.vmeaddr+")")[:-1] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("MywVMEEntry.getvme(): vb not supplied, returning 0x55aa") <NEW_LINE> self.vmeval="0x55aa" <NEW_LINE> <DEDENT> <DEDENT> def updateentry(self, event=None): <NEW_LINE> <INDENT> newentry= self.getEntry() <NEW_LINE> if newentry == self.vmeval: return <NEW_LINE> if self.userupdate: <NEW_LINE> <INDENT> strforvme= self.userupdate(self.vmeval, newentry) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> strforvme= newentry <NEW_LINE> <DEDENT> self.vmeval= newentry <NEW_LINE> if self.vb: <NEW_LINE> <INDENT> self.vb.io.execute("vmeopw32("+self.vmeaddr+", "+ strforvme+")") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("MywVMEEntry.updateentry(): vb not supplied, writing ", self.vmeval)
See MywEntry. This class in addition to MywEntry: - is initialised by VME value (VME read in __init__) - when entry field modified, VME register vmeaddr is updated when: mouse cursor leaves the entry, ENTER is pressed or when label button pressed - userupdate(oldval,newval) method: converts the string given by user to hex/dec number to be written into vme
62599033b57a9660fecd2b75
class AlphaBetaAgent(MultiAgentSearchAgent): <NEW_LINE> <INDENT> def getAction(self, gameState): <NEW_LINE> <INDENT> def maxAgent(state, depth, alpha, beta): <NEW_LINE> <INDENT> if terminalTest(state): <NEW_LINE> <INDENT> return state.getScore() <NEW_LINE> <DEDENT> actions = state.getLegalActions(0) <NEW_LINE> curMin = float("-inf") <NEW_LINE> nextBestAct = Directions.STOP <NEW_LINE> for action in actions: <NEW_LINE> <INDENT> utilityVal = minAgent(state.generateSuccessor(0, action), depth, 1, alpha, beta) <NEW_LINE> curMin = max(curMin, utilityVal); <NEW_LINE> if(curMin == utilityVal): <NEW_LINE> <INDENT> nextBestAct = action <NEW_LINE> <DEDENT> if curMin > beta: <NEW_LINE> <INDENT> return curMin <NEW_LINE> <DEDENT> alpha = max(alpha, curMin) <NEW_LINE> <DEDENT> if depth == 0: <NEW_LINE> <INDENT> return nextBestAct <NEW_LINE> <DEDENT> return curMin <NEW_LINE> <DEDENT> def minAgent(state, depth, numGhost, alpha, beta): <NEW_LINE> <INDENT> if terminalTest(state): <NEW_LINE> <INDENT> return state.getScore() <NEW_LINE> <DEDENT> next_ghost = numGhost + 1 <NEW_LINE> if numGhost == state.getNumAgents() - 1: <NEW_LINE> <INDENT> next_ghost = 0 <NEW_LINE> <DEDENT> actions = state.getLegalActions(numGhost) <NEW_LINE> utilityVal = float("inf") <NEW_LINE> for action in actions: <NEW_LINE> <INDENT> if next_ghost != 0: <NEW_LINE> <INDENT> score = minAgent(state.generateSuccessor(numGhost, action), depth, next_ghost, alpha, beta) <NEW_LINE> <DEDENT> elif depth == self.depth - 1: <NEW_LINE> <INDENT> score = self.evaluationFunction(state.generateSuccessor(numGhost, action)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> score = maxAgent(state.generateSuccessor(numGhost, action), depth + 1, alpha, beta) <NEW_LINE> <DEDENT> if score < utilityVal: <NEW_LINE> <INDENT> utilityVal = score <NEW_LINE> <DEDENT> if utilityVal < alpha: <NEW_LINE> <INDENT> return utilityVal <NEW_LINE> <DEDENT> beta = min(beta, utilityVal) <NEW_LINE> <DEDENT> return utilityVal <NEW_LINE> <DEDENT> return maxAgent(gameState, 0, float("-inf"), float("inf"))
Your minimax agent with alpha-beta pruning (question 3)
625990338e05c05ec3f6f6d3
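For reference, the same pruning logic can be shown on a hand-built game tree without the Pacman framework the agent plugs into; this toy version uses the same strict > beta / < alpha cut-offs as the record:

# Self-contained alpha-beta pruning on a toy game tree (leaves are utilities);
# independent of the Pacman framework used by AlphaBetaAgent.
def alphabeta(node, alpha, beta, maximizing):
    if isinstance(node, (int, float)):        # leaf node
        return node
    if maximizing:
        value = float("-inf")
        for child in node:
            value = max(value, alphabeta(child, alpha, beta, False))
            if value > beta:
                break                          # prune: MIN will never allow this branch
            alpha = max(alpha, value)
        return value
    value = float("inf")
    for child in node:
        value = min(value, alphabeta(child, alpha, beta, True))
        if value < alpha:
            break                              # prune: MAX will never allow this branch
        beta = min(beta, value)
    return value

tree = [[3, 5], [6, [9, 1]], [1, 2]]           # MAX over three MIN nodes
print(alphabeta(tree, float("-inf"), float("inf"), True))   # 6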