Columns: code (string, 4 to 4.48k chars) · docstring (string, 1 to 6.45k chars) · _id (string, 24 chars)
class Command(BaseCommand): <NEW_LINE> <INDENT> help = 'Send email reminder to Mentors about Reps without reports.' <NEW_LINE> SUBJECT = '[Report] Your mentees with no reports for %s' <NEW_LINE> EMAIL_TEMPLATE = 'emails/mentor_notification.txt' <NEW_LINE> def handle(self, *args, **options): <NEW_LINE> <INDENT> rep_group = Group.objects.get(name='Rep') <NEW_LINE> reps = rep_group.user_set.exclude( userprofile__registration_complete=False) <NEW_LINE> date = go_back_n_months(datetime.datetime.today(), 2) <NEW_LINE> reps_without_report = reps.exclude(reports__month__year=date.year, reports__month__month=date.month) <NEW_LINE> mentors = [rep.userprofile.mentor for rep in reps_without_report] <NEW_LINE> month = number2month(date.month) <NEW_LINE> subject = self.SUBJECT % month <NEW_LINE> data = {'year': date.year, 'month': month, 'reps_without_report': reps_without_report} <NEW_LINE> send_remo_mail(mentors, subject, self.EMAIL_TEMPLATE, data)
Command to send email reminder to Mentors about Reps without reports.
6259907666673b3332c31d79
class DunMcStrategy(Strategy): <NEW_LINE> <INDENT> def __init__(self, env: Environment, model: RQModelBase, memory_capacity=100000, discount_factor=0.96, batch_size=64, epsilon=0.5): <NEW_LINE> <INDENT> super(DunMcStrategy, self).__init__(env, model) <NEW_LINE> self.replay = GenericMemory(memory_capacity, [ ('state', np.float32, env.state_shape), ('q', np.float32, env.num_actions), ]) <NEW_LINE> self.discount_factor = discount_factor <NEW_LINE> self.epsilon = epsilon <NEW_LINE> self.batch_size = batch_size <NEW_LINE> self.mean_reward = IncrementalMean() <NEW_LINE> <DEDENT> def run(self, sess: tf.Session, num_episodes: int, *args, **kwargs): <NEW_LINE> <INDENT> for episode in range(num_episodes): <NEW_LINE> <INDENT> states, actions, rewards, u = self.play_episode(sess) <NEW_LINE> total_reward = rewards.sum() <NEW_LINE> self.mean_reward.add(total_reward) <NEW_LINE> u[-1, actions[-1]] = 0.0 <NEW_LINE> self.replay.append(states[-1], u[-1]) <NEW_LINE> for i in reversed(range(len(actions) - 1)): <NEW_LINE> <INDENT> u[i, actions[i]] = self.discount_factor * (rewards[i + 1] + u[i + 1, actions[i + 1]]) <NEW_LINE> self.replay.append(states[i], u[i]) <NEW_LINE> <DEDENT> if len(self.replay) > self.batch_size: <NEW_LINE> <INDENT> batch_states, batch_u = self.replay.sample(self.batch_size) <NEW_LINE> loss = self.model.train_u(sess, batch_states, batch_u) <NEW_LINE> print('MC: Episode: {0}/{1} Loss={2:.5f} R: {3:.3f} Avg R: {4:.3f}' .format(episode, num_episodes, loss, total_reward, self.mean_reward.value)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def play_episode(self, sess: tf.Session, use_policy: bool = False): <NEW_LINE> <INDENT> states = [] <NEW_LINE> u_values = [] <NEW_LINE> actions = [] <NEW_LINE> rewards = [] <NEW_LINE> last_state = self.env.reset() <NEW_LINE> while True: <NEW_LINE> <INDENT> predicted_r, predicted_u, _, predicted_policy = self.model.predict_r_u_q_p(sess, [last_state]) <NEW_LINE> if use_policy: <NEW_LINE> <INDENT> action = np.random.choice(self.env.num_actions, p=predicted_policy[0]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if np.random.rand() < self.epsilon: <NEW_LINE> <INDENT> action = np.random.randint(0, self.env.num_actions) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> action = np.argmax(predicted_r[0] + predicted_u[0]) <NEW_LINE> <DEDENT> <DEDENT> new_state = self.env.do_action(action) <NEW_LINE> states.append(last_state) <NEW_LINE> actions.append(action) <NEW_LINE> rewards.append(predicted_r[0][action]) <NEW_LINE> u_values.append(predicted_u[0]) <NEW_LINE> if self.env.is_terminal(): <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> last_state = new_state <NEW_LINE> <DEDENT> return np.array(states), np.array(actions), np.array(rewards), np.array(u_values)
Updates Q values with the accumulated rewards over a whole episode
625990767047854f46340d33
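The DunMcStrategy row above backs discounted returns up from the end of each episode. A minimal standalone sketch of that backward pass, with hypothetical rewards and no environment or TensorFlow session:

import numpy as np

# Backward Monte Carlo backup as in DunMcStrategy.run:
# U_i = discount_factor * (r_{i+1} + U_{i+1}), starting from the terminal step.
discount_factor = 0.96
rewards = np.array([0.0, 0.0, 1.0])   # per-step rewards of a toy 3-step episode
u = np.zeros(len(rewards))            # value of the action actually taken at each step
u[-1] = 0.0                           # terminal step: no future return
for i in reversed(range(len(rewards) - 1)):
    u[i] = discount_factor * (rewards[i + 1] + u[i + 1])
print(u)  # approx. [0.9216 0.96 0.]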
class DysonPureHotCool(DysonPureCool, DysonHeatingDevice): <NEW_LINE> <INDENT> pass
Dyson Pure Hot+Cool device.
625990764f6381625f19a168
class BibCatalogSystem: <NEW_LINE> <INDENT> TICKET_ATTRIBUTES = ['ticketid', 'priority', 'recordid', 'subject', 'text', 'creator', 'owner', 'date', 'status', 'queue', 'url_display', 'url_modify', 'url_close'] <NEW_LINE> def check_system(self, uid=None): <NEW_LINE> <INDENT> raise NotImplementedError("This class cannot be instantiated") <NEW_LINE> <DEDENT> def ticket_search(self, uid, recordid=-1, subject="", text="", creator="", owner="", date_from="", date_until="", status="", priority="", queue=""): <NEW_LINE> <INDENT> raise NotImplementedError("This class cannot be instantiated") <NEW_LINE> <DEDENT> def ticket_submit(self, uid=None, subject="", recordid=-1, text="", queue="", priority="", owner="",requestor=""): <NEW_LINE> <INDENT> raise NotImplementedError("This class cannot be instantiated") <NEW_LINE> <DEDENT> def ticket_assign(self, uid, ticketid, to_user): <NEW_LINE> <INDENT> raise NotImplementedError("This class cannot be instantiated") <NEW_LINE> <DEDENT> def ticket_set_attribute(self, uid, ticketid, attribute, new_value): <NEW_LINE> <INDENT> raise NotImplementedError("This class cannot be instantiated") <NEW_LINE> <DEDENT> def ticket_get_attribute(self, uid, ticketid, attribute): <NEW_LINE> <INDENT> raise NotImplementedError("This class cannot be instantiated") <NEW_LINE> <DEDENT> def ticket_get_info(self, uid, ticketid, attributes = None): <NEW_LINE> <INDENT> raise NotImplementedError("This class cannot be instantiated") <NEW_LINE> <DEDENT> def ticket_comment(self, uid, ticketid, comment): <NEW_LINE> <INDENT> raise NotImplementedError("This class cannot be instantiated")
A template class for ticket support.
625990763539df3088ecdc11
class IntegerValidator(NumberValidator): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> NumberValidator.__init__(self,floats=False) <NEW_LINE> <DEDENT> def Clone(self): <NEW_LINE> <INDENT> return IntegerValidator()
Validator for integer numbers
625990765fdd1c0f98e5f8f8
class ActivePowerLimit(OperationalLimit): <NEW_LINE> <INDENT> def __init__(self, value=0.0, ActivePowerLimitSet=None, *args, **kw_args): <NEW_LINE> <INDENT> self.value = value <NEW_LINE> self._ActivePowerLimitSet = None <NEW_LINE> self.ActivePowerLimitSet = ActivePowerLimitSet <NEW_LINE> super(ActivePowerLimit, self).__init__(*args, **kw_args) <NEW_LINE> <DEDENT> _attrs = ["value"] <NEW_LINE> _attr_types = {"value": float} <NEW_LINE> _defaults = {"value": 0.0} <NEW_LINE> _enums = {} <NEW_LINE> _refs = ["ActivePowerLimitSet"] <NEW_LINE> _many_refs = [] <NEW_LINE> def getActivePowerLimitSet(self): <NEW_LINE> <INDENT> return self._ActivePowerLimitSet <NEW_LINE> <DEDENT> def setActivePowerLimitSet(self, value): <NEW_LINE> <INDENT> if self._ActivePowerLimitSet is not None: <NEW_LINE> <INDENT> filtered = [x for x in self.ActivePowerLimitSet.ActivePowerLimits if x != self] <NEW_LINE> self._ActivePowerLimitSet._ActivePowerLimits = filtered <NEW_LINE> <DEDENT> self._ActivePowerLimitSet = value <NEW_LINE> if self._ActivePowerLimitSet is not None: <NEW_LINE> <INDENT> if self not in self._ActivePowerLimitSet._ActivePowerLimits: <NEW_LINE> <INDENT> self._ActivePowerLimitSet._ActivePowerLimits.append(self) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> ActivePowerLimitSet = property(getActivePowerLimitSet, setActivePowerLimitSet)
Limit on active power flow.
6259907632920d7e50bc79c2
class IsInHgRepoConstraint(AbstractConstraint): <NEW_LINE> <INDENT> def test(self, view: sublime.View) -> bool: <NEW_LINE> <INDENT> view_info = self.get_view_info(view) <NEW_LINE> if not view_info["file_name"]: <NEW_LINE> <INDENT> raise AlwaysFalsyException("file not on disk") <NEW_LINE> <DEDENT> return self.has_sibling(view_info["file_path"], ".hg/")
Check whether this file is in a Mercurial repo.
62599076aad79263cf430132
class Capture(ContextSensitive): <NEW_LINE> <INDENT> def __init__(self, parser: Parser, zero_length_warning: bool=True) -> None: <NEW_LINE> <INDENT> super(Capture, self).__init__(parser) <NEW_LINE> self.zero_length_warning: bool = zero_length_warning <NEW_LINE> self._can_capture_zero_length: Optional[bool] = None <NEW_LINE> <DEDENT> def __deepcopy__(self, memo): <NEW_LINE> <INDENT> symbol = copy.deepcopy(self.parser, memo) <NEW_LINE> duplicate = self.__class__(symbol, self.zero_length_warning) <NEW_LINE> copy_combined_parser_attrs(self, duplicate) <NEW_LINE> duplicate._can_capture_zero_length = self._can_capture_zero_length <NEW_LINE> return duplicate <NEW_LINE> <DEDENT> @property <NEW_LINE> def can_capture_zero_length(self) -> bool: <NEW_LINE> <INDENT> if self._can_capture_zero_length is None: <NEW_LINE> <INDENT> self._can_capture_zero_length = self.parser._parse(StringView(""))[0] is not None <NEW_LINE> <DEDENT> return cast(bool, self._can_capture_zero_length) <NEW_LINE> <DEDENT> def _rollback(self): <NEW_LINE> <INDENT> return self.grammar.variables__[self.pname].pop() <NEW_LINE> <DEDENT> def _parse(self, text: StringView) -> ParsingResult: <NEW_LINE> <INDENT> node, text_ = self.parser(text) <NEW_LINE> if node is not None: <NEW_LINE> <INDENT> assert self.pname, """Tried to apply an unnamed capture-parser!""" <NEW_LINE> assert not self.parser.drop_content, "Cannot capture content from parsers that drop content!" <NEW_LINE> self.grammar.variables__[self.pname].append(node.content) <NEW_LINE> self.grammar.push_rollback__(self._rollback_location(text, text_), self._rollback) <NEW_LINE> return self._return_value(node), text_ <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None, text <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.parser.repr <NEW_LINE> <DEDENT> def static_analysis(self) -> List[AnalysisError]: <NEW_LINE> <INDENT> errors = super().static_analysis() <NEW_LINE> if not self.pname: <NEW_LINE> <INDENT> errors.append(AnalysisError(self.pname, self, Error( 'Capture only works as named parser! Error in parser: ' + str(self), 0, CAPTURE_WITHOUT_PARSERNAME ))) <NEW_LINE> <DEDENT> if self.parser.apply(lambda plist: plist[-1].drop_content): <NEW_LINE> <INDENT> errors.append(AnalysisError(self.pname, self, Error( 'Captured symbol "%s" contains parsers that drop content, ' 'which can lead to unintended results!' % (self.pname or str(self)), 0, CAPTURE_DROPPED_CONTENT_WARNING ))) <NEW_LINE> <DEDENT> if self.zero_length_warning: <NEW_LINE> <INDENT> node, _ = self.parser._parse(StringView('')) <NEW_LINE> if node is not None: <NEW_LINE> <INDENT> errors.append(AnalysisError(self.pname, self, Error( 'Variable "%s" captures zero length strings, which can lead to ' 'its remaining on the stack after backtracking!' % (self.pname or str(self)), 0, ZERO_LENGTH_CAPTURE_POSSIBLE_WARNING ))) <NEW_LINE> self._can_capture_zero_length = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._can_capture_zero_length = False <NEW_LINE> <DEDENT> <DEDENT> return errors
Applies the contained parser and, in case of a match, saves the result in a variable. A variable is a stack of values associated with the contained parser's name. This requires the contained parser to be named.
625990767c178a314d78e8a8
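The Capture docstring above describes a stack of captured values per parser name, popped again on backtracking. A generic illustration of that bookkeeping (not the DHParser API itself, just the stack-and-rollback idea):

# Captured values live on a per-symbol stack; a rollback closure pops the value
# again if the surrounding parser later backtracks, mirroring Capture._rollback.
variables = {"symbol": []}

def capture(name, content):
    variables[name].append(content)
    return lambda: variables[name].pop()  # rollback callback

undo = capture("symbol", "matched text")
print(variables["symbol"])  # ['matched text']
undo()                      # backtracking removes the captured value again
print(variables["symbol"])  # []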
class Test(unittest.TestCase): <NEW_LINE> <INDENT> def testFunction(self): <NEW_LINE> <INDENT> r = Restaurant('sample') <NEW_LINE> self.assertEqual(-1, r.test_grades(['A','B'])) <NEW_LINE> self.assertEqual(1, r.test_grades(['C','B','B'])) <NEW_LINE> self.assertEqual(0, r.test_grades(['A'])) <NEW_LINE> self.assertEqual(0, r.test_grades(['C','B','C']))
Test the test_grades function in the Restaurant class.
625990768a349b6b43687bd5
class Sysadmin(factory.Factory): <NEW_LINE> <INDENT> FACTORY_FOR = ckan.model.User <NEW_LINE> fullname = 'Mr. Test Sysadmin' <NEW_LINE> password = 'pass' <NEW_LINE> about = 'Just another test sysadmin.' <NEW_LINE> name = factory.Sequence(lambda n: 'test_sysadmin_{0:02d}'.format(n)) <NEW_LINE> email = factory.LazyAttribute(_generate_email) <NEW_LINE> sysadmin = True <NEW_LINE> @classmethod <NEW_LINE> def _build(cls, target_class, *args, **kwargs): <NEW_LINE> <INDENT> raise NotImplementedError(".build() isn't supported in CKAN") <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _create(cls, target_class, *args, **kwargs): <NEW_LINE> <INDENT> if args: <NEW_LINE> <INDENT> assert False, "Positional args aren't supported, use keyword args." <NEW_LINE> <DEDENT> user = target_class(**dict(kwargs, sysadmin=True)) <NEW_LINE> ckan.model.Session.add(user) <NEW_LINE> ckan.model.Session.commit() <NEW_LINE> ckan.model.Session.remove() <NEW_LINE> user_dict = helpers.call_action('user_show', id=user.id, context={'user': user.name}) <NEW_LINE> return user_dict
A factory class for creating sysadmin users.
625990764f88993c371f11de
class Gyroscope(morse.core.sensor.Sensor): <NEW_LINE> <INDENT> _name = "Gyroscope" <NEW_LINE> add_data('yaw', 0.0, "float", 'rotation around the Z axis of the sensor, in radian') <NEW_LINE> add_data('pitch', 0.0, "float", 'rotation around the Y axis of the sensor, in radian') <NEW_LINE> add_data('roll', 0.0, "float", 'rotation around the X axis of the sensor, in radian') <NEW_LINE> def __init__(self, obj, parent=None): <NEW_LINE> <INDENT> logger.info('%s initialization' % obj.name) <NEW_LINE> morse.core.sensor.Sensor.__init__(self, obj, parent) <NEW_LINE> logger.info('Component initialized, runs at %.2f Hz', self.frequency) <NEW_LINE> <DEDENT> def default_action(self): <NEW_LINE> <INDENT> yaw = self.position_3d.yaw <NEW_LINE> pitch = self.position_3d.pitch <NEW_LINE> roll = self.position_3d.roll <NEW_LINE> self.robot_parent.yaw = yaw <NEW_LINE> self.robot_parent.pitch = pitch <NEW_LINE> self.robot_parent.roll = roll <NEW_LINE> self.local_data['yaw'] = float(yaw) <NEW_LINE> self.local_data['pitch'] = float(pitch) <NEW_LINE> self.local_data['roll'] = float(roll)
This sensor emulates a Gyroscope, providing the yaw, pitch and roll angles of the sensor object with respect to the Blender world reference axes. Angles are given in radians.
6259907697e22403b383c87e
class PageThree(tk.Frame): <NEW_LINE> <INDENT> def __init__(self, parent, root): <NEW_LINE> <INDENT> super().__init__(parent) <NEW_LINE> tk.Label(self, text="这是其他", font=LARGE_FONT).pack() <NEW_LINE> button1 = ttk.Button(self, text="回到选课程", command=lambda: root.show_frame(StartPage)).pack()
Other (miscellaneous) page.
625990763317a56b869bf203
class User(db.Model): <NEW_LINE> <INDENT> __tablename__ = "users" <NEW_LINE> user_id = db.Column(db.Integer, autoincrement=True, primary_key=True) <NEW_LINE> email = db.Column(db.String(128), nullable=True) <NEW_LINE> password = db.Column(db.String(128), nullable=True) <NEW_LINE> age = db.Column(db.Integer, nullable=True) <NEW_LINE> zipcode = db.Column(db.String(15), nullable=True) <NEW_LINE> def similarity(self, other): <NEW_LINE> <INDENT> u_ratings = {} <NEW_LINE> paired_ratings = [] <NEW_LINE> for r in self.ratings: <NEW_LINE> <INDENT> u_ratings[r.movie_id] = r <NEW_LINE> <DEDENT> for r in other.ratings: <NEW_LINE> <INDENT> u_r = u_ratings.get(r.movie_id) <NEW_LINE> if u_r: <NEW_LINE> <INDENT> paired_ratings.append((u_r.score, r.score)) <NEW_LINE> <DEDENT> <DEDENT> if paired_ratings: <NEW_LINE> <INDENT> return pearson(paired_ratings) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 0.0 <NEW_LINE> <DEDENT> <DEDENT> def predict_rating(self, movie): <NEW_LINE> <INDENT> other_ratings = movie.ratings <NEW_LINE> similarities = [ (self.similarity(r.user), r) for r in other_ratings ] <NEW_LINE> similarities.sort(key=lambda x: x[0], reverse=True) <NEW_LINE> similarities = [(sim, r) for sim, r in similarities if sim > 0] <NEW_LINE> if not similarities: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> numerator = sum([r.score * sim for sim, r in similarities]) <NEW_LINE> denominator = sum([sim for sim, r in similarities]) <NEW_LINE> return numerator / denominator <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return f"<User user_id={self.user_id} email={self.email}>"
User of ratings website.
625990765fcc89381b266e17
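User.predict_rating in the row above is a similarity-weighted average of other users' scores. A standalone illustration with hypothetical numbers:

# Other users' scores are weighted by their (positive) Pearson similarity to this user.
sims_and_scores = [(0.9, 5.0), (0.4, 3.0), (0.1, 4.0)]  # (similarity, score) pairs
numerator = sum(score * sim for sim, score in sims_and_scores)
denominator = sum(sim for sim, _ in sims_and_scores)
print(round(numerator / denominator, 2))  # 4.36 -- pulled toward the most similar user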
class BIOSConfig(DRACConfig): <NEW_LINE> <INDENT> def __init__(self, bios_settings, committed_job): <NEW_LINE> <INDENT> super(BIOSConfig, self).__init__('BIOS', committed_job) <NEW_LINE> self.bios_settings = bios_settings <NEW_LINE> self.changing_settings = {} <NEW_LINE> <DEDENT> def is_change_required(self): <NEW_LINE> <INDENT> return bool(self.changing_settings) <NEW_LINE> <DEDENT> def validate(self, goal_settings): <NEW_LINE> <INDENT> unknown = set(goal_settings) - set(self.bios_settings) <NEW_LINE> if unknown: <NEW_LINE> <INDENT> raise UnknownSetting("BIOS setting(s) do not exist: %s" % ", ".join(unknown)) <NEW_LINE> <DEDENT> <DEDENT> def process(self, goal_settings): <NEW_LINE> <INDENT> self._determine_initial_state(goal_settings) <NEW_LINE> self._determine_required_changes(goal_settings) <NEW_LINE> <DEDENT> def _determine_initial_state(self, goal_settings): <NEW_LINE> <INDENT> changing = False <NEW_LINE> pending = False <NEW_LINE> conflicting = False <NEW_LINE> for key, goal_setting in goal_settings.items(): <NEW_LINE> <INDENT> bios_setting = self.bios_settings[key] <NEW_LINE> if bios_setting.pending_value is not None: <NEW_LINE> <INDENT> if bios_setting.pending_value == goal_setting: <NEW_LINE> <INDENT> pending = True <NEW_LINE> continue <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> conflicting = True <NEW_LINE> <DEDENT> <DEDENT> if (bios_setting.current_value != goal_setting or bios_setting.pending_value is not None): <NEW_LINE> <INDENT> changing = True <NEW_LINE> <DEDENT> <DEDENT> self.set_initial_state(changing, pending, conflicting) <NEW_LINE> <DEDENT> def _determine_required_changes(self, goal_settings): <NEW_LINE> <INDENT> abandoning = self.is_abandon_required() <NEW_LINE> if abandoning: <NEW_LINE> <INDENT> self.changing_settings = { key: bios_setting.pending_value for key, bios_setting in self.bios_settings.items() if bios_setting.pending_value is not None } <NEW_LINE> self.changing_settings.update(goal_settings) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.changing_settings = {} <NEW_LINE> for key, goal_setting in goal_settings.items(): <NEW_LINE> <INDENT> bios_setting = self.bios_settings[key] <NEW_LINE> if bios_setting.pending_value is not None: <NEW_LINE> <INDENT> if bios_setting.pending_value == goal_setting: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> <DEDENT> if (bios_setting.current_value != goal_setting or bios_setting.pending_value is not None): <NEW_LINE> <INDENT> self.changing_settings[key] = goal_setting <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def get_settings_to_apply(self): <NEW_LINE> <INDENT> return self.changing_settings.copy()
Configuration state machine for DRAC BIOS settings.
625990763d592f4c4edbc81b
class SymbioticTool(SymbioticBaseTool): <NEW_LINE> <INDENT> def __init__(self, opts): <NEW_LINE> <INDENT> SymbioticBaseTool.__init__(self, opts) <NEW_LINE> self._memsafety = self._options.property.memsafety() <NEW_LINE> <DEDENT> def name(self): <NEW_LINE> <INDENT> return 'predator' <NEW_LINE> <DEDENT> def executable(self): <NEW_LINE> <INDENT> return util.find_executable('check-property.sh', 'sl_build/check-property.sh') <NEW_LINE> <DEDENT> def llvm_version(self): <NEW_LINE> <INDENT> return llvm_version <NEW_LINE> <DEDENT> def set_environment(self, symbiotic_dir, opts): <NEW_LINE> <INDENT> opts.linkundef = [] <NEW_LINE> <DEDENT> def passes_before_verification(self): <NEW_LINE> <INDENT> return super().passes_before_verification() + ["-delete-undefined", "-lowerswitch", "-simplifycfg"] <NEW_LINE> <DEDENT> def actions_before_verification(self, symbiotic): <NEW_LINE> <INDENT> output = symbiotic.curfile + '.c' <NEW_LINE> runcmd(['llvm2c', symbiotic.curfile, '--o', output], DbgWatch('all')) <NEW_LINE> symbiotic.curfile = output <NEW_LINE> <DEDENT> def cmdline(self, executable, options, tasks, propertyfile=None, rlimits={}): <NEW_LINE> <INDENT> cmd = [self.executable(), '--trace=/dev/null', '--propertyfile', propertyfile, '--'] + tasks <NEW_LINE> if self._options.is32bit: <NEW_LINE> <INDENT> cmd.append("-m32") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> cmd.append("-m64") <NEW_LINE> <DEDENT> return cmd <NEW_LINE> <DEDENT> def determine_result(self, returncode, returnsignal, output, isTimeout): <NEW_LINE> <INDENT> status = "UNKNOWN" <NEW_LINE> for line in (l.decode('ascii') for l in output): <NEW_LINE> <INDENT> if "UNKNOWN" in line: <NEW_LINE> <INDENT> status = result.RESULT_UNKNOWN <NEW_LINE> <DEDENT> elif "TRUE" in line: <NEW_LINE> <INDENT> status = result.RESULT_TRUE_PROP <NEW_LINE> <DEDENT> elif "FALSE(valid-memtrack)" in line: <NEW_LINE> <INDENT> status = result.RESULT_FALSE_MEMTRACK <NEW_LINE> <DEDENT> elif "FALSE(valid-deref)" in line: <NEW_LINE> <INDENT> status = result.RESULT_FALSE_DEREF <NEW_LINE> <DEDENT> elif "FALSE(valid-free)" in line: <NEW_LINE> <INDENT> status = result.RESULT_FALSE_FREE <NEW_LINE> <DEDENT> elif "FALSE(valid-memcleanup)" in line: <NEW_LINE> <INDENT> status = result.RESULT_FALSE_MEMCLEANUP <NEW_LINE> <DEDENT> elif "FALSE" in line: <NEW_LINE> <INDENT> status = result.RESULT_FALSE_REACH <NEW_LINE> <DEDENT> if status == "UNKNOWN" and isTimeout: <NEW_LINE> <INDENT> status = "TIMEOUT" <NEW_LINE> <DEDENT> <DEDENT> return status
Predator integrated into Symbiotic
6259907666673b3332c31d7b
class TestValueRef(object): <NEW_LINE> <INDENT> def test_grammar_typechecking(self): <NEW_LINE> <INDENT> grammar_types = [ ('value', [str]), ('value', [int]), ('value', [float]), ('field', [str]), ('scale', [str]), ('mult', [int]), ('mult', [float]), ('offset', [int]), ('offset', [float]), ('band', [bool])] <NEW_LINE> assert_grammar_typechecking(grammar_types, ValueRef()) <NEW_LINE> <DEDENT> def test_json_serialization(self): <NEW_LINE> <INDENT> vref = ValueRef() <NEW_LINE> nt.assert_equal(json.dumps({}), vref.to_json(pretty_print=False)) <NEW_LINE> props = { 'value': 'test-value', 'band': True} <NEW_LINE> vref = ValueRef(**props) <NEW_LINE> nt.assert_equal(json.dumps(props), vref.to_json(pretty_print=False)) <NEW_LINE> props = { 'value': 'test-value', 'field': 'test-field', 'scale': 'test-scale', 'mult': 1.2, 'offset': 4, 'band': True} <NEW_LINE> vref = ValueRef(**props) <NEW_LINE> nt.assert_equal(json.dumps(props), vref.to_json(pretty_print=False))
Test the ValueRef class
625990762c8b7c6e89bd5165
class Shooter(pygame.sprite.Sprite): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.image = pygame.Surface([20, 20]) <NEW_LINE> self.image.fill(RED) <NEW_LINE> self.rect = self.image.get_rect() <NEW_LINE> self.rect.x = 600 <NEW_LINE> self.rect.y = 10
This class represents the player.
625990764e4d562566373d7e
class Match(ctypes.Structure): <NEW_LINE> <INDENT> _fields_ = [("projection", ctypes.c_char_p), ("delta", ctypes.c_double)]
Python class to mirror the match struct in C
62599076a17c0f6771d5d86b
class test_BrLenDerivatives_ExpCM_empirical_phi(test_BrLenDerivatives_ExpCM): <NEW_LINE> <INDENT> MODEL = phydmslib.models.ExpCM_empirical_phi
Test branch length derivatives for ExpCM with empirical phi.
6259907632920d7e50bc79c4
class StateAwareWeblogConfigEditForm(WeblogConfigEditForm): <NEW_LINE> <INDENT> label = _(u'Configure Blog') <NEW_LINE> description = _(u"Weblog View Configuration") <NEW_LINE> form_name = _(u"Configure rule") <NEW_LINE> template = ViewPageTemplateFile('weblogconfig.pt') <NEW_LINE> form_fields = form.Fields(IStateAwareWeblogConfiguration) <NEW_LINE> def setUpWidgets(self, ignore_request=False): <NEW_LINE> <INDENT> self.adapters = {} <NEW_LINE> wvconfig = IStateAwareWeblogConfiguration(self.context) <NEW_LINE> self.widgets = form.setUpEditWidgets( self.form_fields, self.prefix, wvconfig, self.request, adapters=self.adapters, ignore_request=ignore_request ) <NEW_LINE> <DEDENT> @form.action("submit") <NEW_LINE> def submit(self, action, data): <NEW_LINE> <INDENT> wvconfig = IStateAwareWeblogConfiguration(self.context) <NEW_LINE> form.applyChanges(wvconfig, self.form_fields, data) <NEW_LINE> msg = _(u'Configuration saved.') <NEW_LINE> IStatusMessage(self.request).addStatusMessage(msg, type='info')
Edit form for weblog view configuration.
625990768a43f66fc4bf3b12
class VrSamplePage(page.Page): <NEW_LINE> <INDENT> def __init__(self, sample_page, page_set, url_parameters=None, extra_browser_args=None): <NEW_LINE> <INDENT> url = '%s.html' % sample_page <NEW_LINE> if url_parameters is not None: <NEW_LINE> <INDENT> url += '?' + '&'.join(url_parameters) <NEW_LINE> <DEDENT> name = url.replace('.html', '') <NEW_LINE> url = 'file://' + os.path.join(SAMPLE_DIR, url) <NEW_LINE> super(VrSamplePage, self).__init__( url=url, page_set=page_set, name=name, extra_browser_args=extra_browser_args, shared_page_state_class=vr_state.SharedAndroidVrPageState) <NEW_LINE> self._shared_page_state = None <NEW_LINE> <DEDENT> def Run(self, shared_state): <NEW_LINE> <INDENT> self._shared_page_state = shared_state <NEW_LINE> super(VrSamplePage, self).Run(shared_state) <NEW_LINE> <DEDENT> @property <NEW_LINE> def platform(self): <NEW_LINE> <INDENT> return self._shared_page_state.platform
Superclass for all VR sample pages.
6259907691f36d47f2231b4d
class VGG(nn.Module): <NEW_LINE> <INDENT> def __init__(self, features): <NEW_LINE> <INDENT> super(VGG, self).__init__() <NEW_LINE> self.features = features <NEW_LINE> self.classifier = nn.Sequential( nn.Dropout(), nn.Linear(512, 512), nn.ReLU(True), nn.Dropout(), nn.Linear(512, 512), nn.ReLU(True), nn.Linear(512, 100), ) <NEW_LINE> for m in self.modules(): <NEW_LINE> <INDENT> if isinstance(m, nn.Conv2d): <NEW_LINE> <INDENT> n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels <NEW_LINE> m.weight.data.normal_(0, math.sqrt(2. / n)) <NEW_LINE> m.bias.data.zero_() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> x = self.features(x) <NEW_LINE> x = x.view(x.size(0), -1) <NEW_LINE> x = self.classifier(x) <NEW_LINE> return x
VGG model
6259907616aa5153ce401e57
class ViewTestCase(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> user = User.objects.create_superuser(username='olivia', password='secret', email="[email protected]") <NEW_LINE> self.client = APIClient() <NEW_LINE> self.client.force_authenticate(user=user) <NEW_LINE> self.instance_data = { "name": "James", } <NEW_LINE> self.pk = None <NEW_LINE> self.response = self.client.post( reverse(module + ':api-create-room'), self.instance_data, ) <NEW_LINE> self.api_list = self.client.get(reverse(module+':api-list-rooms')) <NEW_LINE> <DEDENT> def test_api_can_create_an_instance(self): <NEW_LINE> <INDENT> self.assertEqual(self.response.status_code, status.HTTP_201_CREATED) <NEW_LINE> self.assertEqual(json.loads(self.response.content).get('name'), self.instance_data.get('name')) <NEW_LINE> instance = Table.objects.first() <NEW_LINE> self.pk = instance.pk <NEW_LINE> self.assertEqual(instance.name, 'James') <NEW_LINE> api_update = self.client.put( reverse(module + ':api-update', kwargs={'pk': self.pk}), self.instance_data, ) <NEW_LINE> self.assertEqual(api_update.data.get('name'), 'James') <NEW_LINE> <DEDENT> def test_api_list(self): <NEW_LINE> <INDENT> self.assertEqual(self.api_list.status_code, status.HTTP_200_OK)
Test suite for the api views.
62599076091ae356687065b7
class ColorBar(PlotlyDict): <NEW_LINE> <INDENT> pass
ColorBar doc.
6259907697e22403b383c880
class annotator_1to1(object): <NEW_LINE> <INDENT> thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') <NEW_LINE> def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined - class is abstract") <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> def make(*args, **kwargs): <NEW_LINE> <INDENT> return _blocks_swig0.annotator_1to1_make(*args, **kwargs) <NEW_LINE> <DEDENT> make = staticmethod(make) <NEW_LINE> def data(self): <NEW_LINE> <INDENT> return _blocks_swig0.annotator_1to1_data(self) <NEW_LINE> <DEDENT> __swig_destroy__ = _blocks_swig0.delete_annotator_1to1 <NEW_LINE> __del__ = lambda self : None;
1-to-1 stream annotator testing block. FOR TESTING PURPOSES ONLY. This block creates tags to be sent downstream every 10,000 items it sees. The tags contain the name and ID of the instantiated block, use "seq" as a key, and have a counter that increments by 1 for every tag produced that is used as the tag's value. The tags are propagated using the 1-to-1 policy. It also stores a copy of all tags it sees flow past it. These tags can be recalled externally with the data() member. Warning: This block is only meant for testing and showing how to use the tags. Constructor Specific Documentation: Args: when : sizeof_stream_item :
6259907632920d7e50bc79c5
class WhoFromCmd(Command): <NEW_LINE> <INDENT> aliases = ('@who', '@@who') <NEW_LINE> syntax = "[<player>]" <NEW_LINE> lock = "perm(manage players)" <NEW_LINE> arg_parsers = { 'player': parsers.MatchDescendants(cls=BasePlayer, search_for='player', show=True), } <NEW_LINE> def run(self, this, actor, args): <NEW_LINE> <INDENT> if args['player'] is None: <NEW_LINE> <INDENT> table = ui.Table([ ui.Column('Player', data_key='player', align='l', cell_formatter=actor.name_for), ui.Column('IP', data_key='ip', align='l'), ui.Column('Host', data_key='hostname', align='l'), ]) <NEW_LINE> table.add_rows(*self.game.session_handler.sessions) <NEW_LINE> actor.msg(ui.report("Player Connection Info", table)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> p = args['player'] <NEW_LINE> if not p.connected: <NEW_LINE> <INDENT> actor.tell("{m", p, "{y is not connected.") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> s = p.session <NEW_LINE> actor.tell("{m", p, "{n is connected from IP {y", s.ip, "{n ({c", s.hostname, "{n)")
@who [<player>] Display connection information for the specified player.
625990763317a56b869bf204
class Connection(rackit.Connection): <NEW_LINE> <INDENT> projects = rackit.RootResource(AuthProject) <NEW_LINE> def __init__(self, auth_url, params, interface = 'public', verify = True): <NEW_LINE> <INDENT> self.auth_url = auth_url.rstrip('/') <NEW_LINE> self.params = params <NEW_LINE> self.interface = interface <NEW_LINE> self.verify = verify <NEW_LINE> session = requests.Session() <NEW_LINE> session.auth = self <NEW_LINE> session.verify = verify <NEW_LINE> super().__init__(auth_url, session) <NEW_LINE> try: <NEW_LINE> <INDENT> response = self.api_post('/auth/tokens', json = dict(auth = params.as_dict())) <NEW_LINE> <DEDENT> except rackit.ApiError: <NEW_LINE> <INDENT> session.close() <NEW_LINE> raise <NEW_LINE> <DEDENT> self.token = response.headers['X-Subject-Token'] <NEW_LINE> json = response.json() <NEW_LINE> self.username = json['token']['user']['name'] <NEW_LINE> self.project_id = json['token'].get('project', {}).get('id') <NEW_LINE> self.endpoints = {} <NEW_LINE> for entry in json['token'].get('catalog', []): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> endpoint = next( ep['url'] for ep in entry['endpoints'] if ep['interface'] == self.interface ) <NEW_LINE> <DEDENT> except StopIteration: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> self.endpoints[entry['type']] = urlsplit(endpoint)._replace(path = '').geturl() <NEW_LINE> <DEDENT> <DEDENT> def __call__(self, request): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> request.headers['X-Auth-Token'] = self.token <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return request <NEW_LINE> <DEDENT> def scoped_connection(self, project_or_id): <NEW_LINE> <INDENT> if isinstance(project_or_id, Resource): <NEW_LINE> <INDENT> project_id = project_or_id.id <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> project_id = project_or_id <NEW_LINE> <DEDENT> return Connection( self.auth_url, AuthParams().use_token(self.token).use_project_id(project_id), self.interface, self.verify )
Class for a connection to an OpenStack API, which handles the authentication, project and service discovery elements. Can be used as an auth object for a requests session.
625990765fcc89381b266e18
class CategoryDaoImpl(CategoryDao): <NEW_LINE> <INDENT> def find_all(self): <NEW_LINE> <INDENT> super().find_all() <NEW_LINE> <DEDENT> def find_by_id(self, id=0): <NEW_LINE> <INDENT> super().find_by_id(id) <NEW_LINE> <DEDENT> def save(self, product): <NEW_LINE> <INDENT> super().save(product) <NEW_LINE> <DEDENT> def update(self, product): <NEW_LINE> <INDENT> super().update(product) <NEW_LINE> <DEDENT> def delete(self, product): <NEW_LINE> <INDENT> super().delete(product) <NEW_LINE> <DEDENT> def delete_by_id(self, id): <NEW_LINE> <INDENT> super().delete_by_id(id)
This class performs CRUD operations for the Category.
6259907621bff66bcd7245e7
class EditHandler(BaseHandler): <NEW_LINE> <INDENT> @tornado.web.authenticated <NEW_LINE> @tornado.web.addslash <NEW_LINE> def get(self, network_name): <NEW_LINE> <INDENT> network = self.bouncer.networks[network_name] <NEW_LINE> self.render("edit.html", network=network, **self.env) <NEW_LINE> <DEDENT> @tornado.web.authenticated <NEW_LINE> @tornado.web.addslash <NEW_LINE> def post(self, network_name): <NEW_LINE> <INDENT> self.bouncer.remove_network(network_name) <NEW_LINE> network_name = self.get_argument("networkname") <NEW_LINE> network_address = self.get_argument("networkaddress") <NEW_LINE> nickname = self.get_argument("nickname") <NEW_LINE> realname = self.get_argument("realname") <NEW_LINE> ident = self.get_argument("ident") <NEW_LINE> password = self.get_argument("password") <NEW_LINE> hostname, port = network_address.split(":") <NEW_LINE> self.bouncer.add_network(network=network_name, hostname=hostname, port=port, nickname=nickname, realname=realname, username=ident, password=password) <NEW_LINE> self.redirect("/")
The RequestHandler that serves the edit network page. The edit network page uses a form to receive updated settings from users. When a network is edited, it is disconnected and then recreated using the new settings.
62599076a17c0f6771d5d86c
class Update(ValuesBase): <NEW_LINE> <INDENT> __visit_name__ = 'update' <NEW_LINE> def __init__(self, table, whereclause, values=None, inline=False, bind=None, returning=None, **kwargs): <NEW_LINE> <INDENT> ValuesBase.__init__(self, table, values) <NEW_LINE> self._bind = bind <NEW_LINE> self._returning = returning <NEW_LINE> if whereclause is not None: <NEW_LINE> <INDENT> self._whereclause = _literal_as_text(whereclause) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._whereclause = None <NEW_LINE> <DEDENT> self.inline = inline <NEW_LINE> if kwargs: <NEW_LINE> <INDENT> self.kwargs = self._process_deprecated_kw(kwargs) <NEW_LINE> <DEDENT> <DEDENT> def get_children(self, **kwargs): <NEW_LINE> <INDENT> if self._whereclause is not None: <NEW_LINE> <INDENT> return self._whereclause, <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return () <NEW_LINE> <DEDENT> <DEDENT> def _copy_internals(self, clone=_clone, **kw): <NEW_LINE> <INDENT> self._whereclause = clone(self._whereclause, **kw) <NEW_LINE> self.parameters = self.parameters.copy() <NEW_LINE> <DEDENT> @_generative <NEW_LINE> def where(self, whereclause): <NEW_LINE> <INDENT> if self._whereclause is not None: <NEW_LINE> <INDENT> self._whereclause = and_(self._whereclause, _literal_as_text(whereclause)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._whereclause = _literal_as_text(whereclause) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def _extra_froms(self): <NEW_LINE> <INDENT> froms = [] <NEW_LINE> seen = set([self.table]) <NEW_LINE> if self._whereclause is not None: <NEW_LINE> <INDENT> for item in _from_objects(self._whereclause): <NEW_LINE> <INDENT> if not seen.intersection(item._cloned_set): <NEW_LINE> <INDENT> froms.append(item) <NEW_LINE> <DEDENT> seen.update(item._cloned_set) <NEW_LINE> <DEDENT> <DEDENT> return froms
Represent an Update construct. The :class:`.Update` object is created using the :func:`update()` function.
625990777c178a314d78e8aa
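The Update row above is SQLAlchemy's internal construct; per its docstring it is normally built with the update() function. A hypothetical usage sketch against an assumed "users" table (public API shown here, which may differ slightly between SQLAlchemy versions):

from sqlalchemy import Column, Integer, MetaData, String, Table, update

metadata = MetaData()
users = Table("users", metadata,
              Column("id", Integer, primary_key=True),
              Column("name", String(50)))

# Build an UPDATE ... SET name=... WHERE id=5 statement without executing it.
stmt = update(users).where(users.c.id == 5).values(name="ed")
print(stmt)  # UPDATE users SET name=:name WHERE users.id = :id_1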
class Button(): <NEW_LINE> <INDENT> def __init__(self, screen, msg): <NEW_LINE> <INDENT> self.screen = screen <NEW_LINE> self.screen_rect = screen.get_rect() <NEW_LINE> self.width, self.height = 200, 50 <NEW_LINE> self.button_color = (0, 255, 0) <NEW_LINE> self.text_color = (255, 255, 255) <NEW_LINE> self.font = pygame.font.SysFont(None, 48) <NEW_LINE> self.rect = pygame.Rect(0, 0, self.width, self.height) <NEW_LINE> self.rect.center = self.screen_rect.center <NEW_LINE> self.prep_msg(msg) <NEW_LINE> <DEDENT> def prep_msg(self, msg): <NEW_LINE> <INDENT> self.msg_image = self.font.render(msg, True, self.text_color, self.button_color) <NEW_LINE> self.msg_image_rect = self.msg_image.get_rect() <NEW_LINE> self.msg_image_rect.center = self.rect.center <NEW_LINE> <DEDENT> def draw_button(self): <NEW_LINE> <INDENT> self.screen.fill(self.button_color, self.rect) <NEW_LINE> self.screen.blit(self.msg_image, self.msg_image_rect)
Button class
62599077e1aae11d1e7cf4ce
class PackageUpdate(NotContainerized, OpenShiftCheck): <NEW_LINE> <INDENT> name = "package_update" <NEW_LINE> tags = ["preflight"] <NEW_LINE> def run(self, tmp, task_vars): <NEW_LINE> <INDENT> args = {"packages": []} <NEW_LINE> return self.module_executor("check_yum_update", args, tmp, task_vars)
Check that there are no conflicts in RPM packages.
6259907744b2445a339b761d
class L1ICache(L1Cache): <NEW_LINE> <INDENT> size = '16kB' <NEW_LINE> clusivity = 'mostly_incl' <NEW_LINE> def __init__(self, options=None): <NEW_LINE> <INDENT> super(L1ICache, self).__init__(options) <NEW_LINE> if not options or not options.l1i_size: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.size = options.l1i_size <NEW_LINE> self.clusivity = options.l1i_type <NEW_LINE> <DEDENT> def connectCPU(self, cpu): <NEW_LINE> <INDENT> self.cpu_side = cpu.icache_port
Simple L1 instruction cache with default values
625990778a349b6b43687bd9
class Testcase_260_250_FlowmodPriority(base_tests.SimpleDataPlane): <NEW_LINE> <INDENT> @wireshark_capture <NEW_LINE> def runTest(self): <NEW_LINE> <INDENT> logging.info("Running 260.250 - Priority level of flow entry test") <NEW_LINE> rv = delete_all_flows(self.controller) <NEW_LINE> self.assertEqual(rv, 0, "Failed to delete all flows") <NEW_LINE> in_port, out_port1, out_port2= openflow_ports(3) <NEW_LINE> table_id=0 <NEW_LINE> priority=1000 <NEW_LINE> actions=[ofp.action.output(port=out_port1, max_len=128)] <NEW_LINE> instructions=[ofp.instruction.apply_actions(actions=actions)] <NEW_LINE> match = ofp.match([ ofp.oxm.eth_dst([0x00, 0x01, 0x02, 0x03, 0x04, 0x05]) ]) <NEW_LINE> req = ofp.message.flow_add(table_id=table_id, match= match, buffer_id=ofp.OFP_NO_BUFFER, instructions=instructions, priority=priority) <NEW_LINE> logging.info("Sending flowmod") <NEW_LINE> rv = self.controller.message_send(req) <NEW_LINE> self.assertTrue(rv != -1, "Failed to insert flow") <NEW_LINE> priority=2000 <NEW_LINE> actions=[ofp.action.output(port=out_port2, max_len=128)] <NEW_LINE> instructions=[ofp.instruction.apply_actions(actions=actions)] <NEW_LINE> match = ofp.match([ ofp.oxm.eth_type(0x0800) ]) <NEW_LINE> req = ofp.message.flow_add(table_id=table_id, match= match, buffer_id=ofp.OFP_NO_BUFFER, instructions=instructions, priority=priority) <NEW_LINE> logging.info("Sending flowmod") <NEW_LINE> rv = self.controller.message_send(req) <NEW_LINE> self.assertTrue(rv != -1, "Failed to insert flow") <NEW_LINE> pkt = str(simple_tcp_packet()) <NEW_LINE> logging.info("Sending dataplane packets") <NEW_LINE> self.dataplane.send(in_port, pkt) <NEW_LINE> verify_packet(self, pkt, out_port2) <NEW_LINE> logging.info("Packet forwarded as expected")
260.250 - Priority level of flow entry. Verify that traffic matches against higher-priority rules.
62599077aad79263cf430137
class EdgeCasesTest(QueueDatabaseTest): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> QueueDatabaseTest.setUp(self) <NEW_LINE> <DEDENT> def test_aggregate_empty_results(self): <NEW_LINE> <INDENT> self.create_wiki_cohort() <NEW_LINE> metric = metric_classes['NamespaceEdits']( name='NamespaceEdits', namespaces=[0, 1, 2], start_date='2010-01-01 00:00:00', end_date='2010-01-02 00:00:00', ) <NEW_LINE> options = { 'individualResults': True, 'aggregateResults': True, 'aggregateSum': True, 'aggregateAverage': True, 'aggregateStandardDeviation': True, } <NEW_LINE> ar = AggregateReport( metric, self.basic_wiki_cohort, options, user_id=self.basic_wiki_cohort_owner, ) <NEW_LINE> result = ar.task.delay(ar).get() <NEW_LINE> assert_equals(result[Aggregation.IND].keys(), []) <NEW_LINE> assert_equals(result[Aggregation.SUM]['edits'], r(0)) <NEW_LINE> assert_equals(result[Aggregation.AVG]['edits'], r(0)) <NEW_LINE> assert_equals(result[Aggregation.STD]['edits'], r(0))
Tests edge cases in how the metric system reports results.
62599077d268445f2663a81d
class ExactInference(InferenceModule): <NEW_LINE> <INDENT> def initializeUniformly(self, gameState): <NEW_LINE> <INDENT> self.beliefs = util.Counter() <NEW_LINE> for p in self.legalPositions: self.beliefs[p] = 1.0 <NEW_LINE> self.beliefs.normalize() <NEW_LINE> <DEDENT> def observe(self, observation, gameState): <NEW_LINE> <INDENT> noisyDistance = observation <NEW_LINE> emissionModel = busters.getObservationDistribution(noisyDistance) <NEW_LINE> pacmanPosition = gameState.getPacmanPosition() <NEW_LINE> allPossible = util.Counter() <NEW_LINE> if noisyDistance == None: <NEW_LINE> <INDENT> allPossible = util.Counter() <NEW_LINE> allPossible[self.getJailPosition()] = 1.0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for location in self.legalPositions: <NEW_LINE> <INDENT> distance = util.manhattanDistance(location, pacmanPosition) <NEW_LINE> allPossible[location] = emissionModel[distance] * self.beliefs[location] <NEW_LINE> <DEDENT> <DEDENT> "*** END YOUR CODE HERE ***" <NEW_LINE> allPossible.normalize() <NEW_LINE> self.beliefs = allPossible <NEW_LINE> <DEDENT> def elapseTime(self, gameState): <NEW_LINE> <INDENT> "*** YOUR CODE HERE ***" <NEW_LINE> allPossible = util.Counter() <NEW_LINE> for oldPos in self.legalPositions: <NEW_LINE> <INDENT> newPosDist = self.getPositionDistribution(self.setGhostPosition(gameState, oldPos)) <NEW_LINE> for newPos, prob in newPosDist.items(): <NEW_LINE> <INDENT> allPossible[newPos] += prob*self.beliefs[oldPos] <NEW_LINE> <DEDENT> <DEDENT> self.beliefs = allPossible <NEW_LINE> <DEDENT> def getBeliefDistribution(self): <NEW_LINE> <INDENT> return self.beliefs
The exact dynamic inference module should use forward-algorithm updates to compute the exact belief function at each time step.
625990773346ee7daa33831f
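ExactInference above applies the forward algorithm: observe() reweights the belief by the emission probabilities and normalizes, while elapseTime() pushes it through the transition model. A toy numeric sketch over a 3-position state space (all numbers hypothetical):

import numpy as np

belief = np.array([1/3, 1/3, 1/3])        # uniform prior, as in initializeUniformly
emission = np.array([0.7, 0.2, 0.1])      # P(noisy distance | ghost position)
belief = belief * emission                # observe(): weight each position by the evidence
belief /= belief.sum()                    # ... and renormalize
transition = np.array([[0.8, 0.2, 0.0],   # P(new position | old position), rows sum to 1
                       [0.1, 0.8, 0.1],
                       [0.0, 0.2, 0.8]])
belief = transition.T @ belief            # elapseTime(): sum over old positions
print(belief)                             # ~[0.58 0.32 0.10], still a probability distribution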
class Column(object): <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.type = NoType <NEW_LINE> self.dtype = None <NEW_LINE> self.str_vals = [] <NEW_LINE> self.fill_values = {}
Table column. The key attributes of a Column object are: * **name** : column name * **type** : column type (NoType, StrType, NumType, FloatType, IntType) * **dtype** : numpy dtype (optional, overrides **type** if set) * **str_vals** : list of column values as strings * **data** : list of converted column values
62599077091ae356687065b9
class Predicate(Pattern): <NEW_LINE> <INDENT> def __init__(self, predicate): <NEW_LINE> <INDENT> self.predicate = predicate <NEW_LINE> <DEDENT> def __match__(self, x): <NEW_LINE> <INDENT> if self.predicate(x): <NEW_LINE> <INDENT> return x <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise MatchFailure(matched=x, pattern=self) <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "Predicate({})".format(self.predicate)
Base class for 'predicate' objects implementing the match protocol
625990771b99ca40022901f5
class ComposeFormTests(TestCase): <NEW_LINE> <INDENT> fixtures = ['users'] <NEW_LINE> def test_invalid_data(self): <NEW_LINE> <INDENT> invalid_data_dicts = [ {'data': {'to': 'john', 'body': ''}, 'error': ('body', [u'This field is required.'])}, ] <NEW_LINE> for invalid_dict in invalid_data_dicts: <NEW_LINE> <INDENT> form = ComposeForm(data=invalid_dict['data']) <NEW_LINE> self.failIf(form.is_valid()) <NEW_LINE> self.assertEqual(form.errors[invalid_dict['error'][0]], invalid_dict['error'][1]) <NEW_LINE> <DEDENT> <DEDENT> def test_save_msg(self): <NEW_LINE> <INDENT> valid_data = {'to': 'john, jane', 'body': 'Body'} <NEW_LINE> form = ComposeForm(data=valid_data) <NEW_LINE> self.failUnless(form.is_valid()) <NEW_LINE> sender = get_user_model().objects.get(username='jane') <NEW_LINE> msg = form.save(sender) <NEW_LINE> self.failUnlessEqual(msg.body, valid_data['body']) <NEW_LINE> self.failUnlessEqual(msg.sender, sender) <NEW_LINE> self.failUnless(msg.sent_at) <NEW_LINE> self.failUnlessEqual(msg.recipients.all()[0].username, 'jane') <NEW_LINE> self.failUnlessEqual(msg.recipients.all()[1].username, 'john')
Test the compose form.
6259907756b00c62f0fb4251
class HelmBuildFilteringPipeline(BuildStepsFilteringPipeline): <NEW_LINE> <INDENT> def __init__(self) -> None: <NEW_LINE> <INDENT> super().__init__( [ HelmBuilderValidator(), GiantSwarmHelmValidator(), HelmGitVersionSetter(), HelmRequirementsUpdater(), HelmChartToolLinter(), KubeLinter(), HelmChartMetadataPreparer(), HelmChartBuilder(), HelmChartMetadataFinalizer(), HelmChartYAMLRestorer(), ], "Helm 3 build engine options", )
Pipeline that combines all the steps required to use helm3 as a chart builder.
625990775fcc89381b266e19
class Timetools: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.timeformatter = '%Y-%m-%d %H:%M:%S' <NEW_LINE> self.dateformatter = '%Y年%m月%d日' <NEW_LINE> self.maxdatespan = 50 <NEW_LINE> <DEDENT> def timetostr(self, ttime): <NEW_LINE> <INDENT> return time.strftime(self.timeformatter, ttime) <NEW_LINE> <DEDENT> def strtotime(self, tstr): <NEW_LINE> <INDENT> return time.strptime(tstr, self.timeformatter) <NEW_LINE> <DEDENT> def timetodatetime(self, ti): <NEW_LINE> <INDENT> return datetime.datetime( ti.tm_year, ti.tm_mon, ti.tm_mday, ti.tm_hour, ti.tm_min, ti.tm_sec) <NEW_LINE> <DEDENT> def timestrtodatestr(self, tistr): <NEW_LINE> <INDENT> ts = self.strtotime(tistr) <NEW_LINE> return time.strftime(self.dateformatter, ts) <NEW_LINE> <DEDENT> def sectoother(self, secs): <NEW_LINE> <INDENT> if secs / 3600 > 1: <NEW_LINE> <INDENT> return str(int(secs / 3600)) + "小时前" <NEW_LINE> <DEDENT> elif secs / 60 > 1: <NEW_LINE> <INDENT> return str(int(secs / 60)) + "分钟前" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return str(int(secs)) + "秒前" <NEW_LINE> <DEDENT> <DEDENT> def timetonow(self, tstr): <NEW_LINE> <INDENT> now = time.localtime() <NEW_LINE> last = self.strtotime(tstr) <NEW_LINE> dnow = self.timetodatetime(now) <NEW_LINE> dlast = self.timetodatetime(last) <NEW_LINE> span = dnow - dlast <NEW_LINE> if span.days > self.maxdatespan: <NEW_LINE> <INDENT> return self.timestrtodatestr(tstr) <NEW_LINE> <DEDENT> elif span.days > 0: <NEW_LINE> <INDENT> return str(span.days) + "天前" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.sectoother(int(span.seconds))
Date conversion utilities.
6259907799fddb7c1ca63a95
class TestSetup(unittest.TestCase): <NEW_LINE> <INDENT> layer = PLOMINO_WIZARD_INTEGRATION_TESTING <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> self.portal = self.layer['portal'] <NEW_LINE> self.installer = api.portal.get_tool('portal_quickinstaller') <NEW_LINE> <DEDENT> def test_product_installed(self): <NEW_LINE> <INDENT> self.assertTrue(self.installer.isProductInstalled('plomino.wizard')) <NEW_LINE> <DEDENT> def test_browserlayer(self): <NEW_LINE> <INDENT> from plomino.wizard.interfaces import IPlominoWizardLayer <NEW_LINE> from plone.browserlayer import utils <NEW_LINE> self.assertIn(IPlominoWizardLayer, utils.registered_layers())
Test that plomino.wizard is properly installed.
625990773317a56b869bf205
class DueFilter(BaseFilter): <NEW_LINE> <INDENT> def __init__(self, dueRange): <NEW_LINE> <INDENT> BaseFilter.__init__(self, dueRange) <NEW_LINE> <DEDENT> def isMatch(self, task): <NEW_LINE> <INDENT> return (not task.is_complete) and (self.text in task.dueRanges) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "DueFilter(%s)" % self.text
Due list filter for ranges
625990774527f215b58eb660
class ContactForm(forms.ModelForm): <NEW_LINE> <INDENT> captcha = ReCaptchaField() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = Contact <NEW_LINE> fields = ("email", "captcha") <NEW_LINE> widgets = { "email": forms.TextInput(attrs={"class": "editContent", "placeholder": "Your Email..."}) } <NEW_LINE> labels = { "email": '' }
Email subscription form.
62599077adb09d7d5dc0bee9
class RobustQueue(Queue): <NEW_LINE> <INDENT> def __init__(self, loop, future_store, channel, name, durable, exclusive, auto_delete, arguments): <NEW_LINE> <INDENT> super(RobustQueue, self).__init__(loop, future_store, channel, name or "amq_%s" % shortuuid.uuid(), durable, exclusive, auto_delete, arguments) <NEW_LINE> self._consumers = {} <NEW_LINE> self._bindings = {} <NEW_LINE> <DEDENT> @gen.coroutine <NEW_LINE> def on_reconnect(self, channel): <NEW_LINE> <INDENT> self._futures.reject_all(compat.ConnectionError("Auto Reconnect Error")) <NEW_LINE> self._channel = channel._channel <NEW_LINE> yield self.declare() <NEW_LINE> for item, kwargs in self._bindings.items(): <NEW_LINE> <INDENT> exchange, routing_key = item <NEW_LINE> yield self.bind(exchange, routing_key, **kwargs) <NEW_LINE> <DEDENT> for consumer_tag, kwargs in tuple(self._consumers.items()): <NEW_LINE> <INDENT> yield self.consume(consumer_tag=consumer_tag, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> @gen.coroutine <NEW_LINE> def bind(self, exchange, routing_key=None, arguments=None, timeout=None): <NEW_LINE> <INDENT> kwargs = dict(arguments=arguments, timeout=timeout) <NEW_LINE> result = yield super(RobustQueue, self).bind(exchange=exchange, routing_key=routing_key, **kwargs) <NEW_LINE> self._bindings[(exchange, routing_key)] = kwargs <NEW_LINE> raise gen.Return(result) <NEW_LINE> <DEDENT> @gen.coroutine <NEW_LINE> def unbind(self, exchange, routing_key, arguments=None, timeout=None): <NEW_LINE> <INDENT> result = yield super(RobustQueue, self).unbind(exchange, routing_key, arguments, timeout) <NEW_LINE> self._bindings.pop((exchange, routing_key), None) <NEW_LINE> raise gen.Return(result) <NEW_LINE> <DEDENT> @tools.coroutine <NEW_LINE> def consume(self, callback, no_ack=False, exclusive=False, arguments=None, consumer_tag=None, timeout=None): <NEW_LINE> <INDENT> kwargs = dict(callback=callback, no_ack=no_ack, exclusive=exclusive, arguments=arguments) <NEW_LINE> consumer_tag = yield super(RobustQueue, self).consume(consumer_tag=consumer_tag, **kwargs) <NEW_LINE> self._consumers[consumer_tag] = kwargs <NEW_LINE> raise gen.Return(consumer_tag) <NEW_LINE> <DEDENT> @gen.coroutine <NEW_LINE> def cancel(self, consumer_tag, timeout=None): <NEW_LINE> <INDENT> result = yield super(RobustQueue, self).cancel(consumer_tag, timeout) <NEW_LINE> self._consumers.pop(consumer_tag, None) <NEW_LINE> raise gen.Return(result)
A queue that, if the connection drops, will recreate itself once it's back up
625990774a966d76dd5f086b
@override_settings(MODULESTORE=TEST_DATA_MIXED_MODULESTORE) <NEW_LINE> class TestPeerGradingFound(ModuleStoreTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.course_name = 'edX/open_ended_nopath/2012_Fall' <NEW_LINE> self.course = modulestore().get_course(self.course_name) <NEW_LINE> <DEDENT> def test_peer_grading_nopath(self): <NEW_LINE> <INDENT> found, url = views.find_peer_grading_module(self.course) <NEW_LINE> self.assertEqual(found, False)
Test to see if peer grading modules can be found properly.
625990773539df3088ecdc17
class TVshow: <NEW_LINE> <INDENT> def __init__(self, show): <NEW_LINE> <INDENT> self.__show1 = show <NEW_LINE> <DEDENT> @property <NEW_LINE> def show2(self): <NEW_LINE> <INDENT> return self.__show1
Displays a TV show; the attribute is private so it cannot be modified outside the class (read-only).
62599077379a373c97d9a9a2
class Test_Geom_BSplineSurface(unittest.TestCase): <NEW_LINE> <INDENT> def test_Weights(self): <NEW_LINE> <INDENT> s1 = Geom_SphericalSurface(gp_Ax3(), 1.) <NEW_LINE> s2 = Geom_RectangularTrimmedSurface(s1, 0., 1., 0., 1.) <NEW_LINE> s3 = GeomConvert.SurfaceToBSplineSurface_(s2) <NEW_LINE> weights = TColStd_Array2OfReal(1, s3.NbUPoles(), 1, s3.NbVPoles()) <NEW_LINE> s3.Weights(weights) <NEW_LINE> self.assertEqual(weights.Size(), 9) <NEW_LINE> self.assertAlmostEqual(weights.Value(1, 1), 1.0) <NEW_LINE> self.assertAlmostEqual(weights.Value(3, 3), 1.0) <NEW_LINE> <DEDENT> def test_Weights_const(self): <NEW_LINE> <INDENT> s1 = Geom_SphericalSurface(gp_Ax3(), 1.) <NEW_LINE> s2 = Geom_RectangularTrimmedSurface(s1, 0., 1., 0., 1.) <NEW_LINE> s3 = GeomConvert.SurfaceToBSplineSurface_(s2) <NEW_LINE> weights = s3.Weights() <NEW_LINE> self.assertEqual(weights.Size(), 9) <NEW_LINE> self.assertAlmostEqual(weights.Value(1, 1), 1.0) <NEW_LINE> self.assertAlmostEqual(weights.Value(3, 3), 1.0)
Test for Geom_BSplineSurface class.
625990779c8ee82313040e47
class RFFT(Layer): <NEW_LINE> <INDENT> def rfft(self, x, fft_fn): <NEW_LINE> <INDENT> resh = K.cast(K.map_fn(K.transpose, x), dtype='complex64') <NEW_LINE> spec = K.abs(K.map_fn(fft_fn, resh)) <NEW_LINE> out = K.cast(K.map_fn(K.transpose, spec), dtype='float32') <NEW_LINE> shape = tf.shape(out) <NEW_LINE> new_shape = [shape[0], shape[1] // 2, shape[2]] <NEW_LINE> out_real = tf.slice(out, [0, 0, 0], new_shape) <NEW_LINE> return out_real <NEW_LINE> <DEDENT> def call(self, x): <NEW_LINE> <INDENT> return Lambda(self.rfft, arguments={'fft_fn': K.tf.fft})(x) <NEW_LINE> <DEDENT> def compute_output_shape(self, input_shape): <NEW_LINE> <INDENT> if input_shape[1] is None: <NEW_LINE> <INDENT> return input_shape <NEW_LINE> <DEDENT> return (input_shape[0], input_shape[1] // 2, input_shape[2])
Keras layer for the one-dimensional discrete Fourier transform of real input. Computes the rfft transform on each slice along the last dim. Input shape: 3D tensor (batch_size, signal_length, nb_channels). Output shape: 3D tensor (batch_size, int(signal_length / 2), nb_channels).
625990775fdd1c0f98e5f8fe
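The RFFT layer above takes FFT magnitudes along the time axis and keeps the first half of the bins. A plain NumPy sketch of that shape contract, with hypothetical sizes:

import numpy as np

signal = np.random.rand(8, 256, 3)              # (batch_size, signal_length, nb_channels)
spectrum = np.abs(np.fft.fft(signal, axis=1))   # magnitude spectrum along signal_length
half = spectrum[:, :signal.shape[1] // 2, :]    # keep signal_length // 2 bins, as RFFT does
print(half.shape)                               # (8, 128, 3)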
class LearningStyleHistory(db.Model): <NEW_LINE> <INDENT> recorded = db.DateTimeProperty(auto_now_add=True) <NEW_LINE> user = db.ReferenceProperty(Student) <NEW_LINE> intelligent_type = db.ReferenceProperty(IntelligentType) <NEW_LINE> realized_test = db.ReferenceProperty(IntelligentTest)
Learning style history
62599077bf627c535bcb2e4e
class SomeIntEnum(enum.Enum): <NEW_LINE> <INDENT> FOO = 1 <NEW_LINE> BAR = 2
An enum with int values
625990777d847024c075dd5b
class TestCreateFilesystemStructure(TankTestBase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(TestCreateFilesystemStructure, self).setUp() <NEW_LINE> self.setup_fixtures() <NEW_LINE> self.seq = {"type": "Sequence", "id": 2, "code": "seq_code", "project": self.project} <NEW_LINE> self.shot = {"type": "Shot", "id": 1, "code": "shot_code", "sg_sequence": self.seq, "project": self.project} <NEW_LINE> self.step = {"type": "Step", "id": 3, "code": "step_code", "short_name": "step_short_name"} <NEW_LINE> self.asset = {"type": "Asset", "id": 4, "sg_asset_type": "assettype", "code": "assetname", "project": self.project} <NEW_LINE> self.task = {"type":"Task", "id": 1, "content": "this task", "entity": self.shot, "step": {"type": "Step", "id": 3}, "project": self.project} <NEW_LINE> self.add_to_sg_mock_db([self.shot, self.seq, self.step, self.project, self.asset, self.task]) <NEW_LINE> <DEDENT> def test_create_task(self): <NEW_LINE> <INDENT> expected = os.path.join(self.project_root, "sequences", self.seq["code"], self.shot["code"]) <NEW_LINE> self.assertFalse(os.path.exists(expected)) <NEW_LINE> folder.process_filesystem_structure(self.tk, self.task["type"], self.task["id"], preview=False, engine=None) <NEW_LINE> self.assertTrue(os.path.exists(expected)) <NEW_LINE> <DEDENT> def test_create_shot(self): <NEW_LINE> <INDENT> expected = os.path.join(self.project_root, "sequences", self.seq["code"], self.shot["code"]) <NEW_LINE> self.assertFalse(os.path.exists(expected)) <NEW_LINE> folder.process_filesystem_structure(self.tk, self.shot["type"], self.shot["id"], preview=False, engine=None) <NEW_LINE> self.assertTrue(os.path.exists(expected)) <NEW_LINE> <DEDENT> def test_create_asset(self): <NEW_LINE> <INDENT> expected = os.path.join(self.project_root, "assets", self.asset["sg_asset_type"], self.asset["code"]) <NEW_LINE> self.assertFalse(os.path.exists(expected)) <NEW_LINE> folder.process_filesystem_structure(self.tk, self.asset["type"], self.asset["id"], preview=False, engine=None) <NEW_LINE> self.assertTrue(os.path.exists(expected)) <NEW_LINE> <DEDENT> def test_create_project(self): <NEW_LINE> <INDENT> expected = os.path.join(self.project_root, "reference", "artwork") <NEW_LINE> self.assertFalse(os.path.exists(expected)) <NEW_LINE> folder.process_filesystem_structure(self.tk, self.project["type"], self.project["id"], preview=False, engine=None) <NEW_LINE> self.assertTrue(os.path.exists(expected)) <NEW_LINE> <DEDENT> def test_create_sequence(self): <NEW_LINE> <INDENT> expected = os.path.join(self.project_root, "sequences", self.seq["code"]) <NEW_LINE> self.assertFalse(os.path.exists(expected)) <NEW_LINE> folder.process_filesystem_structure(self.tk, self.seq["type"], self.seq["id"], preview=False, engine=None) <NEW_LINE> self.assertTrue(os.path.exists(expected)) <NEW_LINE> <DEDENT> def test_wrong_type_entity_ids(self): <NEW_LINE> <INDENT> for bad_entity_ids in ["abab", self.shot, object()]: <NEW_LINE> <INDENT> self.assertRaises(ValueError, folder.process_filesystem_structure, self.tk, self.shot["type"], bad_entity_ids, preview=False, engine=None)
Tests of the function schema.create_folders.
6259907744b2445a339b761e
class TransplantMergeView(FormView): <NEW_LINE> <INDENT> form_class = UserMergeForm <NEW_LINE> success_url = settings.TRANSPLANT_SUCCESS_URL <NEW_LINE> template_name = 'transplant/merge.html' <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> FormView.__init__(self, **kwargs) <NEW_LINE> <DEDENT> def form_valid(self, form): <NEW_LINE> <INDENT> receiver = self.request.user <NEW_LINE> donor = form.get_user() <NEW_LINE> operations = [o for o in settings.TRANSPLANT_OPERATIONS] <NEW_LINE> with transaction.commit_manually(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> for operation in operations: <NEW_LINE> <INDENT> Surgery(operation[0], operation[1], **operation[2]).merge(receiver, donor) <NEW_LINE> <DEDENT> transaction.commit() <NEW_LINE> return super(TransplantMergeView, self).form_valid(form) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> transaction.rollback() <NEW_LINE> return self.dispatch_exception(e) <NEW_LINE> <DEDENT> <DEDENT> return super(TransplantMergeView, self).form_valid(form) <NEW_LINE> <DEDENT> def dispatch_exception(self, e): <NEW_LINE> <INDENT> if settings.DEBUG is True: <NEW_LINE> <INDENT> raise e <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if settings.TRANSPLANT_FAILURE_URL is None: <NEW_LINE> <INDENT> raise e <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return HttpResponseRedirect(settings.TRANSPLANT_FAILURE_URL) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def get_form_kwargs(self): <NEW_LINE> <INDENT> form_kwargs = super(TransplantMergeView, self).get_form_kwargs() <NEW_LINE> form_kwargs.update({'prefix': 'merge'}) <NEW_LINE> return form_kwargs <NEW_LINE> <DEDENT> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context_data = super(FormView, self).get_context_data(**kwargs) <NEW_LINE> context_data.update({'merge_form': context_data['form']}) <NEW_LINE> return context_data
View performing User merge using all operations defined in settings.TRANSPLANT_OPERATIONS. Handles transactions (rollback on any exception) and exceptions (see transplant.settings for full info). Uses django.contrib.auth.forms.AuthenticationForm by default, but any other Form that conforms to its API (the get_user() method!) will do.
625990778a349b6b43687bdb
class Durations(db.Model): <NEW_LINE> <INDENT> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> interval = db.Column(db.Integer) <NEW_LINE> time_req = db.Column(db.Integer) <NEW_LINE> connection_id = db.Column(db.Integer, db.ForeignKey('connections.id'))
Duration to travel a connection at a given time bucket
62599077d268445f2663a81e
class Detector(object): <NEW_LINE> <INDENT> def __init__(self, threshold=None, sample_times=None): <NEW_LINE> <INDENT> self.threshold = threshold <NEW_LINE> self.sample_times = sample_times <NEW_LINE> self.sample_points = None <NEW_LINE> <DEDENT> def get_sample_points(self, position): <NEW_LINE> <INDENT> if self.sample_points is None: <NEW_LINE> <INDENT> self.sample_points = [(t,) + position(t) for t in self.sample_times] <NEW_LINE> <DEDENT> return self.sample_points <NEW_LINE> <DEDENT> def get_detected_signal(self, signal, position, interp_method, min_distance): <NEW_LINE> <INDENT> pts = self.get_sample_points(position) <NEW_LINE> if len(pts) == 0: <NEW_LINE> <INDENT> return pd.Series() <NEW_LINE> <DEDENT> signal_sample = self._get_signal_at_sample_points(signal, pts, interp_method, min_distance) <NEW_LINE> if len(signal_sample) == 0: <NEW_LINE> <INDENT> return pd.Series() <NEW_LINE> <DEDENT> signal_sample = signal_sample.reset_index() <NEW_LINE> if set(['X', 'Y', 'Z']) < set(list(signal_sample.columns)): <NEW_LINE> <INDENT> signal_sample.drop(['X', 'Y', 'Z'], inplace=True, axis=1) <NEW_LINE> <DEDENT> elif set(['Node']) < set(list(signal_sample.columns)): <NEW_LINE> <INDENT> signal_sample.drop(['Node'], inplace=True, axis=1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('Unrecognized signal format') <NEW_LINE> return <NEW_LINE> <DEDENT> signal_sample = signal_sample.set_index('T') <NEW_LINE> signal_sample = signal_sample[signal_sample >= self.threshold] <NEW_LINE> if len(signal_sample) == 0: <NEW_LINE> <INDENT> return pd.Series() <NEW_LINE> <DEDENT> signal_sample.columns.name = 'Scenario' <NEW_LINE> return signal_sample.stack() <NEW_LINE> <DEDENT> def _get_signal_at_sample_points(self, signal, sample_points, interp_method, min_distance): <NEW_LINE> <INDENT> raise NotImplementedError()
Defines a sensor's detector. Parameters ---------- threshold : int The minimum signal that can be detected by the sensor sample_times : list of ints or floats List of the sensor's sample/measurement times
6259907767a9b606de547765
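A small sketch of the sample-point caching described above, assuming the Detector class is importable as written; the stationary position function is purely illustrative:

det = Detector(threshold=2.0, sample_times=[0, 1, 2])

def stationary(t):
    # position(t) must return the sensor's (x, y, z) location at time t
    return (10.0, 5.0, 1.5)

pts = det.get_sample_points(stationary)
assert pts == [(0, 10.0, 5.0, 1.5), (1, 10.0, 5.0, 1.5), (2, 10.0, 5.0, 1.5)]
assert det.get_sample_points(stationary) is pts   # cached after the first call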
class OMPSerializer(serializers.HyperlinkedModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = models.Book <NEW_LINE> fields = ( 'id', 'slug', 'prefix', 'title', 'subtitle', 'cover', 'submission_date', 'publication_date', 'license', 'pages', 'book_type', 'author', 'description', 'keywords', 'subject', 'languages', 'review_type', 'stage', 'identifier', ) <NEW_LINE> <DEDENT> license = serializers.ReadOnlyField( source='license.code', ) <NEW_LINE> author = AuthorSerializer( many=True, ) <NEW_LINE> keywords = KeywordSerializer( many=True, ) <NEW_LINE> subject = SubjectSerializer( many=True, ) <NEW_LINE> stage = StageSerializer( many=False, ) <NEW_LINE> identifier = IdentiferSerializer( many=True, source='identifier_set', required=False, ) <NEW_LINE> languages = LanguageSerializer( many=True, ) <NEW_LINE> def create(self, validated_data): <NEW_LINE> <INDENT> author_data = validated_data.pop('author') <NEW_LINE> keyword_data = validated_data.pop('keywords') <NEW_LINE> lang_data = validated_data.pop('languages') <NEW_LINE> subject_data = validated_data.pop('subject') <NEW_LINE> validated_data.pop('stage') <NEW_LINE> book = models.Book.objects.create(**validated_data) <NEW_LINE> stage = models.Stage.objects.create(current_stage="published") <NEW_LINE> book.stage = stage <NEW_LINE> for author in author_data: <NEW_LINE> <INDENT> author = models.Author.objects.create(**author) <NEW_LINE> book.author.add(author) <NEW_LINE> <DEDENT> for language in lang_data: <NEW_LINE> <INDENT> lang, c = models.Language.objects.get_or_create(**language) <NEW_LINE> book.languages.add(lang) <NEW_LINE> <DEDENT> for subject in subject_data: <NEW_LINE> <INDENT> subj, c = models.Subject.objects.get_or_create(**subject) <NEW_LINE> book.subject.add(subj) <NEW_LINE> <DEDENT> for keyword in keyword_data: <NEW_LINE> <INDENT> keyw, c = models.Keyword.objects.get_or_create(**keyword) <NEW_LINE> book.keywords.add(keyw) <NEW_LINE> <DEDENT> book.save() <NEW_LINE> return book
This serializer is used only by Ubiquity Press.
6259907732920d7e50bc79c9
class HomeAssistantQueueHandler(logging.handlers.QueueHandler): <NEW_LINE> <INDENT> def handle(self, record: logging.LogRecord) -> Any: <NEW_LINE> <INDENT> return_value = self.filter(record) <NEW_LINE> if return_value: <NEW_LINE> <INDENT> self.emit(record) <NEW_LINE> <DEDENT> return return_value
Process the log in another thread.
625990775fdd1c0f98e5f8ff
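A sketch of how a queue handler like this is typically wired up with the standard library's QueueListener; the logger name is illustrative and this does not reproduce Home Assistant's actual logging setup:

import logging
import logging.handlers
import queue

log_queue = queue.SimpleQueue()
handler = HomeAssistantQueueHandler(log_queue)     # assumes the class above is importable
listener = logging.handlers.QueueListener(log_queue, logging.StreamHandler())
listener.start()

logger = logging.getLogger("demo")                 # hypothetical logger name
logger.addHandler(handler)
logger.warning("handled off the calling thread")   # record is emitted via the queue

listener.stop()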
class Return(Action): <NEW_LINE> <INDENT> def __init__(self, value): <NEW_LINE> <INDENT> self.value = value <NEW_LINE> <DEDENT> def perform(self, token_stream, text): <NEW_LINE> <INDENT> return self.value <NEW_LINE> <DEDENT> def same_as(self, other): <NEW_LINE> <INDENT> return isinstance(other, Return) and self.value == other.value <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "Return(%r)" % self.value
Internal Plex action which causes |value| to be returned as the value of the associated token
62599077a05bb46b3848bdeb
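A tiny illustration of the action's contract, assuming the surrounding Plex module (which provides the Action base class) is importable; perform() ignores the scanner state, so None is passed here purely for illustration:

action = Return("keyword")
assert action.perform(None, "if") == "keyword"     # always hands back the stored value
assert action.same_as(Return("keyword"))
assert repr(action) == "Return('keyword')"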
class Task(ClassTemplate): <NEW_LINE> <INDENT> pass
dnacsdk interaction with Task API on DNA Center.
62599077167d2b6e312b8252
class ContainedInLocationNegated(Relation): <NEW_LINE> <INDENT> relation_name = 'contained_in_location_negated'
contained_in_location_negated relation.
6259907755399d3f05627e95
class Merge: <NEW_LINE> <INDENT> def __init__(self, bot): <NEW_LINE> <INDENT> self.bot = bot <NEW_LINE> <DEDENT> @commands.group(pass_context=True, invoke_without_command=True) <NEW_LINE> @commands.cooldown(1, 5) <NEW_LINE> async def merge(self, ctx, *urls:str): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if urls and 'vertical' in urls: <NEW_LINE> <INDENT> vertical = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> vertical = False <NEW_LINE> <DEDENT> get_images = await self.get_images(ctx, urls=urls, limit=20) <NEW_LINE> if get_images and len(get_images) == 1: <NEW_LINE> <INDENT> await self.bot.say('You gonna merge one image?') <NEW_LINE> return <NEW_LINE> <DEDENT> elif not get_images: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> xx = await self.bot.send_message(ctx.message.channel, "ok, processing") <NEW_LINE> count = 0 <NEW_LINE> list_im = [] <NEW_LINE> for url in get_images: <NEW_LINE> <INDENT> count += 1 <NEW_LINE> b = await self.bytes_download(url) <NEW_LINE> if sys.getsizeof(b) == 215: <NEW_LINE> <INDENT> await self.bot.say(":no_entry: Image `{0}` is invalid!".format(str(count))) <NEW_LINE> continue <NEW_LINE> <DEDENT> list_im.append(b) <NEW_LINE> <DEDENT> imgs = [PIL.Image.open(i).convert('RGBA') for i in list_im] <NEW_LINE> if vertical: <NEW_LINE> <INDENT> max_shape = sorted([(np.sum(i.size), i.size) for i in imgs])[1][1] <NEW_LINE> imgs_comb = np.vstack((np.asarray(i.resize(max_shape)) for i in imgs)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> min_shape = sorted([(np.sum(i.size), i.size) for i in imgs])[0][1] <NEW_LINE> imgs_comb = np.hstack((np.asarray(i.resize(min_shape)) for i in imgs)) <NEW_LINE> <DEDENT> imgs_comb = PIL.Image.fromarray(imgs_comb) <NEW_LINE> final = BytesIO() <NEW_LINE> imgs_comb.save(final, 'png') <NEW_LINE> final.seek(0) <NEW_LINE> await self.bot.delete_message(xx) <NEW_LINE> await self.bot.upload(final, filename='merge.png') <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> await self.bot.say(code.format(e))
Merge avatars
6259907756ac1b37e63039a3
class UserViewSet(BaseViewSet): <NEW_LINE> <INDENT> queryset = User.objects.all().order_by('-date_joined') <NEW_LINE> serializer_class = UserSerializer <NEW_LINE> def current(self, request): <NEW_LINE> <INDENT> user = UserUtils.get_user_from_request(request) <NEW_LINE> self.queryset = User.objects.filter(id=user.id)
API endpoint that allows users to be viewed or edited.
6259907760cbc95b06365a2e
class WitnessUpdateBlock(Block): <NEW_LINE> <INDENT> fields_to_id = ['block_signing_key']
Class to save witness_update operation
62599077cc0a2c111447c792
class Node(object): <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.connections = [] <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name
Node of a tree. Contains references to the other nodes to which it has an outgoing connection. Just do A.connections.append(B) to create an edge like A->B.
625990774a966d76dd5f086c
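For example, building the edge A->B described above (a minimal sketch using the class as written):

a = Node("A")
b = Node("B")
a.connections.append(b)      # creates the directed edge A -> B

assert str(a) == "A"
assert [str(n) for n in a.connections] == ["B"]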
class StockProfileModel(models.Model): <NEW_LINE> <INDENT> tickerName = models.CharField(max_length=10, primary_key=True) <NEW_LINE> fullName = models.CharField(max_length=50) <NEW_LINE> overview = models.CharField(max_length=2000) <NEW_LINE> founded = models.CharField(max_length=4) <NEW_LINE> category= models.CharField(max_length=50) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.fullName
Model representing basic information about a stock including abbreviated name and full name
625990772c8b7c6e89bd516b
class UserProfileManager(BaseUserManager): <NEW_LINE> <INDENT> def create_user(self, email, name, password): <NEW_LINE> <INDENT> if not email: <NEW_LINE> <INDENT> raise ValueError("Email field is required") <NEW_LINE> <DEDENT> email = self.normalize_email(email) <NEW_LINE> user = self.model(email=email, name=name) <NEW_LINE> user.set_password(password) <NEW_LINE> user.save(using=self._db) <NEW_LINE> return user <NEW_LINE> <DEDENT> def create_superuser(self, email, name, password): <NEW_LINE> <INDENT> user = self.create_user(email=email, name=name, password=password) <NEW_LINE> user.is_superuser = True <NEW_LINE> user.is_staff = True <NEW_LINE> user.save(using=self._db) <NEW_LINE> return user
Manager for our user model Args: BaseUserManager (UserManager): Contributed by django users
625990773539df3088ecdc19
class EtherCATHandTrajectorySlider(ExtendedSlider): <NEW_LINE> <INDENT> def __init__(self, joint, uiFile, plugin_parent, parent=None): <NEW_LINE> <INDENT> ExtendedSlider.__init__(self, joint, uiFile, plugin_parent, parent) <NEW_LINE> self.initialize_controller() <NEW_LINE> <DEDENT> def initialize_controller(self): <NEW_LINE> <INDENT> self.slider.setMinimum(self.joint.min) <NEW_LINE> self.slider.setMaximum(self.joint.max) <NEW_LINE> self.min_label.setText(str(self.joint.min)) <NEW_LINE> self.max_label.setText(str(self.joint.max)) <NEW_LINE> self.pub = self.joint.controller.cmd_publisher <NEW_LINE> self.set_slider_behaviour() <NEW_LINE> self.joint.controller.subscribe_status_cb_list.append(self._state_cb) <NEW_LINE> <DEDENT> def _state_cb(self, msg): <NEW_LINE> <INDENT> self.state = msg.actual.positions[ msg.joint_names.index(self.joint.name)] <NEW_LINE> <DEDENT> def sendupdate(self, value): <NEW_LINE> <INDENT> if self.joint.controller.traj_target.joint_names and self.joint.controller.traj_target.points: <NEW_LINE> <INDENT> self.joint.controller.traj_target.points[0].positions[ self.joint.controller.traj_target.joint_names.index(self.joint.name)] = radians(float(value)) <NEW_LINE> self.pub.publish(self.joint.controller.traj_target) <NEW_LINE> <DEDENT> <DEDENT> def update(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.current_value = round(degrees(self.state), 1) <NEW_LINE> self.value.setText("Val: " + str(self.current_value)) <NEW_LINE> if not self.first_update_done: <NEW_LINE> <INDENT> self.slider.setSliderPosition(self.current_value) <NEW_LINE> self.slider.setValue(self.current_value) <NEW_LINE> self.target.setText("Tgt: " + str(self.current_value)) <NEW_LINE> self.first_update_done = True <NEW_LINE> <DEDENT> <DEDENT> except Exception: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def refresh(self): <NEW_LINE> <INDENT> self.slider.setSliderPosition(self.current_value) <NEW_LINE> self.slider.setValue(self.current_value) <NEW_LINE> self.target.setText("Tgt: " + str(self.current_value)) <NEW_LINE> <DEDENT> def set_slider_behaviour(self): <NEW_LINE> <INDENT> if (self.joint.controller.controller_category == "position_trajectory"): <NEW_LINE> <INDENT> if self.pos_slider_tracking_behaviour: <NEW_LINE> <INDENT> self.slider.setTracking(True) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.slider.setTracking(False) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def on_slider_released(self): <NEW_LINE> <INDENT> pass
Slider for one EtherCAT Hand joint, that uses the trajectory controller interface.
625990779c8ee82313040e48
class TestSubtasks(InstructorTaskCourseTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(TestSubtasks, self).setUp() <NEW_LINE> self.initialize_course() <NEW_LINE> <DEDENT> def _enroll_students_in_course(self, course_id, num_students): <NEW_LINE> <INDENT> for _ in range(num_students): <NEW_LINE> <INDENT> random_id = uuid4().hex[:8] <NEW_LINE> self.create_student(username='student{0}'.format(random_id)) <NEW_LINE> <DEDENT> <DEDENT> def _queue_subtasks(self, create_subtask_fcn, items_per_task, initial_count, extra_count): <NEW_LINE> <INDENT> task_id = str(uuid4()) <NEW_LINE> instructor_task = InstructorTaskFactory.create( course_id=self.course.id, task_id=task_id, task_key='dummy_task_key', task_type='bulk_course_email', ) <NEW_LINE> self._enroll_students_in_course(self.course.id, initial_count) <NEW_LINE> task_querysets = [CourseEnrollment.objects.filter(course_id=self.course.id)] <NEW_LINE> def initialize_subtask_info(*args): <NEW_LINE> <INDENT> self._enroll_students_in_course(self.course.id, extra_count) <NEW_LINE> return {} <NEW_LINE> <DEDENT> with patch('instructor_task.subtasks.initialize_subtask_info') as mock_initialize_subtask_info: <NEW_LINE> <INDENT> mock_initialize_subtask_info.side_effect = initialize_subtask_info <NEW_LINE> queue_subtasks_for_query( entry=instructor_task, action_name='action_name', create_subtask_fcn=create_subtask_fcn, item_querysets=task_querysets, item_fields=[], items_per_task=items_per_task, ) <NEW_LINE> <DEDENT> <DEDENT> def test_queue_subtasks_for_query1(self): <NEW_LINE> <INDENT> mock_create_subtask_fcn = Mock() <NEW_LINE> self._queue_subtasks(mock_create_subtask_fcn, 3, 7, 1) <NEW_LINE> mock_create_subtask_fcn_args = mock_create_subtask_fcn.call_args_list <NEW_LINE> self.assertEqual(len(mock_create_subtask_fcn_args[0][0][0]), 3) <NEW_LINE> self.assertEqual(len(mock_create_subtask_fcn_args[1][0][0]), 3) <NEW_LINE> self.assertEqual(len(mock_create_subtask_fcn_args[2][0][0]), 2) <NEW_LINE> <DEDENT> def test_queue_subtasks_for_query2(self): <NEW_LINE> <INDENT> mock_create_subtask_fcn = Mock() <NEW_LINE> self._queue_subtasks(mock_create_subtask_fcn, 3, 8, 3) <NEW_LINE> mock_create_subtask_fcn_args = mock_create_subtask_fcn.call_args_list <NEW_LINE> self.assertEqual(len(mock_create_subtask_fcn_args[0][0][0]), 3) <NEW_LINE> self.assertEqual(len(mock_create_subtask_fcn_args[1][0][0]), 3) <NEW_LINE> self.assertEqual(len(mock_create_subtask_fcn_args[2][0][0]), 5)
Tests for subtasks.
625990777d847024c075dd5d
class BraintreeConfig(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.public_key = settings.BRAINTREE_PUBLIC_KEY <NEW_LINE> self.private_key = settings.BRAINTREE_PRIVATE_KEY
Rather than trying to recreate the entire Environment, which needs information we don't have, just create an object which contains the parts we need to parse webhooks, namely the public and private keys.
62599077a8370b77170f1d4f
class AutoConfig(Config): <NEW_LINE> <INDENT> USERNAME = '[email protected]' <NEW_LINE> PASSWORD = '123456' <NEW_LINE> USER_ID = '13764904' <NEW_LINE> MEMBER_ID = '380' <NEW_LINE> SHOP_ID = 98204 <NEW_LINE> TERMINAL_ID = 109828 <NEW_LINE> URL = 'http://testwkd.snsshop.net' <NEW_LINE> URL_TERMINAL = 'http://testwkdshopadmin.snsshop.net' <NEW_LINE> WX_URL = 'http://wkdianshang.testwkdwx.snsshop.net/wkdianshang' <NEW_LINE> TITLE = u"微客多后台管理系统"
微客多 automated test environment configuration (for automated testing only)
62599077be8e80087fbc0a16
class Price03(Benchmark): <NEW_LINE> <INDENT> def __init__(self, dimensions=2): <NEW_LINE> <INDENT> Benchmark.__init__(self, dimensions) <NEW_LINE> self.bounds = list(zip([-50.0] * self.dimensions, [ 50.0] * self.dimensions)) <NEW_LINE> self.custom_bounds = [(0, 2), (0, 2)] <NEW_LINE> self.global_optimum = [1.0, 1.0] <NEW_LINE> self.fglob = 0.0 <NEW_LINE> <DEDENT> def evaluator(self, x, *args): <NEW_LINE> <INDENT> self.fun_evals += 1 <NEW_LINE> x1, x2 = x <NEW_LINE> return 100.0*(x2 - x1**2.0)**2.0 + (6.4*(x2 - 0.5)**2.0 - x1 - 0.6)**2.0
Price 3 test objective function. This class defines the Price 3 global optimization problem. This is a multimodal minimization problem defined as follows: .. math:: f_{\text{Price03}}(\mathbf{x}) = 100(x_2 - x_1^2)^2 + \left[6.4(x_2 - 0.5)^2 - x_1 - 0.6 \right]^2 Here, :math:`n` represents the number of dimensions and :math:`x_i \in [-50, 50]` for :math:`i=1,2`. .. figure:: figures/Price03.png :alt: Price 3 function :align: center **Two-dimensional Price 3 function** *Global optimum*: :math:`f(x_i) = 0` for :math:`\mathbf{x} = [1, 1]`
6259907776e4537e8c3f0f01
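A quick check that the optimum is consistent with the formula: at x = (1, 1), 100*(1 - 1^2)^2 + (6.4*(1 - 0.5)^2 - 1 - 0.6)^2 = (1.6 - 1.6)^2 = 0. A sketch, assuming the Benchmark base class (which initializes fun_evals) is importable alongside Price03:

bench = Price03()
x_star = bench.global_optimum                    # [1.0, 1.0]
assert abs(bench.evaluator(x_star) - bench.fglob) < 1e-12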
class cached_property(object): <NEW_LINE> <INDENT> def __init__(self, func): <NEW_LINE> <INDENT> self.__doc__ = getattr(func, '__doc__') <NEW_LINE> self.func = func <NEW_LINE> <DEDENT> def __get__(self, obj, cls): <NEW_LINE> <INDENT> if obj is None: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> value = obj.__dict__[self.func.__name__] = self.func(obj) <NEW_LINE> return value
A property that is only computed once per instance and then replaces itself with an ordinary attribute. Deleting the attribute resets the property. This genius snippet is taken from: https://github.com/bottlepy/bottle/commit/fa7733e075da0d790d809aa3d2f53071897e6f76
62599077bf627c535bcb2e50
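A minimal usage sketch of the descriptor above; the Report class and its slow computation are hypothetical:

import time

class Report(object):
    @cached_property
    def data(self):
        time.sleep(0.1)          # stand-in for an expensive computation
        return {"rows": 42}

r = Report()
first = r.data                   # computed once, then stored in r.__dict__
second = r.data                  # plain attribute lookup, no recomputation
assert first is second

del r.data                       # deleting the attribute resets the property
assert "data" not in r.__dict__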
class Hourglass(nn.Module): <NEW_LINE> <INDENT> def __init__(self, down_seq, up_seq, skip_seq, merge_type="add", return_first_skip=False): <NEW_LINE> <INDENT> super(Hourglass, self).__init__() <NEW_LINE> self.depth = len(down_seq) <NEW_LINE> assert (merge_type in ["cat", "add"]) <NEW_LINE> assert (len(up_seq) == self.depth) <NEW_LINE> assert (len(skip_seq) in (self.depth, self.depth + 1)) <NEW_LINE> self.merge_type = merge_type <NEW_LINE> self.return_first_skip = return_first_skip <NEW_LINE> self.extra_skip = (len(skip_seq) == self.depth + 1) <NEW_LINE> self.down_seq = down_seq <NEW_LINE> self.up_seq = up_seq <NEW_LINE> self.skip_seq = skip_seq <NEW_LINE> <DEDENT> def _merge(self, x, y): <NEW_LINE> <INDENT> if y is not None: <NEW_LINE> <INDENT> if self.merge_type == "cat": <NEW_LINE> <INDENT> x = torch.cat((x, y), dim=1) <NEW_LINE> <DEDENT> elif self.merge_type == "add": <NEW_LINE> <INDENT> x = x + y <NEW_LINE> <DEDENT> <DEDENT> return x <NEW_LINE> <DEDENT> def forward(self, x, **kwargs): <NEW_LINE> <INDENT> y = None <NEW_LINE> down_outs = [x] <NEW_LINE> for down_module in self.down_seq._modules.values(): <NEW_LINE> <INDENT> x = down_module(x) <NEW_LINE> down_outs.append(x) <NEW_LINE> <DEDENT> for i in range(len(down_outs)): <NEW_LINE> <INDENT> if i != 0: <NEW_LINE> <INDENT> y = down_outs[self.depth - i] <NEW_LINE> skip_module = self.skip_seq[self.depth - i] <NEW_LINE> y = skip_module(y) <NEW_LINE> x = self._merge(x, y) <NEW_LINE> <DEDENT> if i != len(down_outs) - 1: <NEW_LINE> <INDENT> if (i == 0) and self.extra_skip: <NEW_LINE> <INDENT> skip_module = self.skip_seq[self.depth] <NEW_LINE> x = skip_module(x) <NEW_LINE> <DEDENT> up_module = self.up_seq[self.depth - 1 - i] <NEW_LINE> x = up_module(x) <NEW_LINE> <DEDENT> <DEDENT> if self.return_first_skip: <NEW_LINE> <INDENT> return x, y <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return x
An hourglass module. Parameters: ---------- down_seq : nn.Sequential Down modules as sequential. up_seq : nn.Sequential Up modules as sequential. skip_seq : nn.Sequential Skip connection modules as sequential. merge_type : str, default 'add' How the up and skip outputs are merged ('cat' or 'add'). return_first_skip : bool, default False Whether to return the first skip connection output. Used in ResAttNet.
6259907791f36d47f2231b50
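A minimal sketch of wiring a one-level hourglass from the class above; the pooling, upsampling and identity-skip modules are arbitrary placeholders, not the ones used in any particular network:

import torch
from torch import nn

down_seq = nn.Sequential(nn.MaxPool2d(kernel_size=2))    # one "down" stage
up_seq = nn.Sequential(nn.Upsample(scale_factor=2))      # matching "up" stage
skip_seq = nn.Sequential(nn.Identity())                  # identity skip connection

hg = Hourglass(down_seq, up_seq, skip_seq, merge_type="add")

x = torch.randn(1, 8, 32, 32)
y = hg(x)                # down -> up, then added to the skip of the input
assert y.shape == x.shape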
class InvalidShardId(Exception): <NEW_LINE> <INDENT> pass
Raised when an invalid shard ID is passed
62599077091ae356687065bd
class _420chanThreadExtractor(Extractor): <NEW_LINE> <INDENT> category = "420chan" <NEW_LINE> subcategory = "thread" <NEW_LINE> directory_fmt = ("{category}", "{board}", "{thread} {title}") <NEW_LINE> archive_fmt = "{board}_{thread}_{filename}" <NEW_LINE> pattern = r"(?:https?://)?boards\.420chan\.org/([^/?#]+)/thread/(\d+)" <NEW_LINE> test = ("https://boards.420chan.org/ani/thread/33251/chow-chows", { "pattern": r"https://boards\.420chan\.org/ani/src/\d+\.jpg", "content": "b07c803b0da78de159709da923e54e883c100934", "count": 2, }) <NEW_LINE> def __init__(self, match): <NEW_LINE> <INDENT> Extractor.__init__(self, match) <NEW_LINE> self.board, self.thread = match.groups() <NEW_LINE> <DEDENT> def items(self): <NEW_LINE> <INDENT> url = "https://api.420chan.org/{}/res/{}.json".format( self.board, self.thread) <NEW_LINE> posts = self.request(url).json()["posts"] <NEW_LINE> data = { "board" : self.board, "thread": self.thread, "title" : posts[0].get("sub") or posts[0]["com"][:50], } <NEW_LINE> yield Message.Directory, data <NEW_LINE> for post in posts: <NEW_LINE> <INDENT> if "filename" in post: <NEW_LINE> <INDENT> post.update(data) <NEW_LINE> post["extension"] = post["ext"][1:] <NEW_LINE> url = "https://boards.420chan.org/{}/src/{}{}".format( post["board"], post["filename"], post["ext"]) <NEW_LINE> yield Message.Url, url, post
Extractor for 420chan threads
62599077442bda511e95da19
@attr('shard_1') <NEW_LINE> @ddt.ddt <NEW_LINE> class CourseInstantiationTests(ModuleStoreTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(CourseInstantiationTests, self).setUp() <NEW_LINE> self.factory = RequestFactory() <NEW_LINE> <DEDENT> @ddt.data(*itertools.product(xrange(5), [ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split], [None, 0, 5])) <NEW_LINE> @ddt.unpack <NEW_LINE> def test_repeated_course_module_instantiation(self, loops, default_store, course_depth): <NEW_LINE> <INDENT> with modulestore().default_store(default_store): <NEW_LINE> <INDENT> course = CourseFactory.create() <NEW_LINE> chapter = ItemFactory(parent=course, category='chapter', graded=True) <NEW_LINE> section = ItemFactory(parent=chapter, category='sequential') <NEW_LINE> __ = ItemFactory(parent=section, category='problem') <NEW_LINE> <DEDENT> fake_request = self.factory.get( reverse('progress', kwargs={'course_id': unicode(course.id)}) ) <NEW_LINE> course = modulestore().get_course(course.id, depth=course_depth) <NEW_LINE> for _ in xrange(loops): <NEW_LINE> <INDENT> field_data_cache = FieldDataCache.cache_for_descriptor_descendents( course.id, self.user, course, depth=course_depth ) <NEW_LINE> course_module = get_module_for_descriptor( self.user, fake_request, course, field_data_cache, course.id ) <NEW_LINE> for chapter in course_module.get_children(): <NEW_LINE> <INDENT> for section in chapter.get_children(): <NEW_LINE> <INDENT> for item in section.get_children(): <NEW_LINE> <INDENT> self.assertTrue(item.graded)
Tests around instantiating a course multiple times in the same request.
6259907799fddb7c1ca63a97
class v_bytes(v_prim): <NEW_LINE> <INDENT> _vs_builder = True <NEW_LINE> def __init__(self, size=0, vbytes=None): <NEW_LINE> <INDENT> v_prim.__init__(self) <NEW_LINE> if vbytes == None: <NEW_LINE> <INDENT> vbytes = b'\x00' * size <NEW_LINE> <DEDENT> self._vs_length = len(vbytes) <NEW_LINE> self._vs_value = vbytes <NEW_LINE> self._vs_align = 1 <NEW_LINE> self._vs_fmt = '%ds' % self._vs_length <NEW_LINE> <DEDENT> def vsSetValue(self, val): <NEW_LINE> <INDENT> if len(val) != self._vs_length: <NEW_LINE> <INDENT> raise Exception('v_bytes field set to wrong length!') <NEW_LINE> <DEDENT> self._vs_value = val <NEW_LINE> <DEDENT> def vsParse(self, fbytes, offset=0): <NEW_LINE> <INDENT> offend = offset + self._vs_length <NEW_LINE> self._vs_value = fbytes[offset : offend] <NEW_LINE> return offend <NEW_LINE> <DEDENT> def vsEmit(self): <NEW_LINE> <INDENT> return self._vs_value <NEW_LINE> <DEDENT> def vsSetLength(self, size): <NEW_LINE> <INDENT> size = int(size) <NEW_LINE> self._vs_length = size <NEW_LINE> self._vs_fmt = '%ds' % size <NEW_LINE> b = self._vs_value[:size] <NEW_LINE> self._vs_value = b.ljust(size, b'\x00') <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return b2a_hex(self._vs_value).decode("ascii")
v_bytes is used for fixed width byte fields.
62599077a05bb46b3848bdec
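A short sketch of the fixed-width behaviour, assuming vstruct's v_prim base class is importable (the constructor above calls it with no extra arguments):

v = v_bytes(size=4)
consumed_up_to = v.vsParse(b"\xde\xad\xbe\xef\xff", offset=0)
assert consumed_up_to == 4                       # only the declared width is consumed
assert v.vsEmit() == b"\xde\xad\xbe\xef"
assert repr(v) == "deadbeef"

v.vsSetLength(6)                                 # growing right-pads with NUL bytes
assert v.vsEmit() == b"\xde\xad\xbe\xef\x00\x00"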
class StyledForm(forms.ModelForm): <NEW_LINE> <INDENT> error_css_class = 'error' <NEW_LINE> required_css_class = 'required'
Base class for all our forms. Has Django automatically style the input fields based on state.
62599077ec188e330fdfa22a
class setMetaConf_result: <NEW_LINE> <INDENT> thrift_spec = ( None, (1, TType.STRUCT, 'o1', (MetaException, MetaException.thrift_spec), None, ), ) <NEW_LINE> def __init__(self, o1=None,): <NEW_LINE> <INDENT> self.o1 = o1 <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.o1 = MetaException() <NEW_LINE> self.o1.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('setMetaConf_result') <NEW_LINE> if self.o1 is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('o1', TType.STRUCT, 1) <NEW_LINE> self.o1.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other)
Attributes: - o1
625990774a966d76dd5f086f
@register_plugin <NEW_LINE> class MLPPredictor(PredictorABC): <NEW_LINE> <INDENT> def __init__(self, data: DatasetSplits, model: torch.nn.Module): <NEW_LINE> <INDENT> super().__init__(vectorizer=data.vectorizer, model=model) <NEW_LINE> <DEDENT> def json_to_data(self, input_json: Dict): <NEW_LINE> <INDENT> return { 'x_in': torch.tensor([self.vectorizer.vectorize(input_string=input_string) for input_string in input_json['inputs']])} <NEW_LINE> <DEDENT> def output_to_json(self, outputs: List) -> Dict[str, Any]: <NEW_LINE> <INDENT> return { "outputs": outputs} <NEW_LINE> <DEDENT> def decode(self, output: torch.tensor) -> List[Dict[str, Any]]: <NEW_LINE> <INDENT> probabilities = torch.nn.functional.softmax(output, dim=1) <NEW_LINE> probability_values, indices = probabilities.max(dim=1) <NEW_LINE> return [{ "class": self.vectorizer.target_vocab.lookup_index(index=int(res[1])), "probability": float(res[0])} for res in zip(probability_values, indices)]
Toy example: we want to make predictions on inputs of the form {"inputs": ["hello world", "foo", "bar"]}
625990774a966d76dd5f086e
class TorchImageProcessor: <NEW_LINE> <INDENT> def __init__(self, image_size, is_color, mean, scale, crop_size=0, pad=28, color='BGR', use_cutout=False, use_mirroring=False, use_random_crop=False, use_center_crop=False, use_random_gray=False): <NEW_LINE> <INDENT> self.transf = transforms.ToTensor() <NEW_LINE> <DEDENT> def process(self, image_path): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> image = cv2.imread(image_path) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> image = image_path <NEW_LINE> <DEDENT> if image is None: <NEW_LINE> <INDENT> print(image_path) <NEW_LINE> <DEDENT> return self.transf(image).numpy()
Simple data processors
625990772ae34c7f260aca69
class FastaReader(object): <NEW_LINE> <INDENT> def __init__(self, file, wholefile=False, keep_linebreaks=False): <NEW_LINE> <INDENT> if isinstance(file, str): <NEW_LINE> <INDENT> file = xopen(file, "r") <NEW_LINE> <DEDENT> self.fp = file <NEW_LINE> self.wholefile = wholefile <NEW_LINE> self.keep_linebreaks = keep_linebreaks <NEW_LINE> assert not (wholefile and keep_linebreaks), "not supported" <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self._wholefile_iter() if self.wholefile else self._streaming_iter() <NEW_LINE> <DEDENT> def _streaming_iter(self): <NEW_LINE> <INDENT> name = None <NEW_LINE> seq = "" <NEW_LINE> appendchar = '\n' if self.keep_linebreaks else '' <NEW_LINE> for line in self.fp: <NEW_LINE> <INDENT> line = line.strip() <NEW_LINE> if line and line[0] == ">": <NEW_LINE> <INDENT> if name is not None: <NEW_LINE> <INDENT> assert self.keep_linebreaks or seq.find('\n') == -1 <NEW_LINE> id = name.split()[0] <NEW_LINE> desc = " ".join(name.split()[1:]) <NEW_LINE> yield Sequence(id, desc, seq, None) <NEW_LINE> <DEDENT> name = line[1:] <NEW_LINE> seq = "" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> seq += line + appendchar <NEW_LINE> <DEDENT> <DEDENT> if name is not None: <NEW_LINE> <INDENT> assert self.keep_linebreaks or seq.find('\n') == -1 <NEW_LINE> id = name.split()[0] <NEW_LINE> desc = " ".join(name.split()[1:]) <NEW_LINE> yield Sequence(id, desc, seq, None) <NEW_LINE> <DEDENT> <DEDENT> def _wholefile_iter(self): <NEW_LINE> <INDENT> wholefile = self.fp.read() <NEW_LINE> assert len(wholefile) != 0 and wholefile[0] == '>', "FASTA file must start with '>'" <NEW_LINE> parts = wholefile.split('\n>') <NEW_LINE> parts[0] = parts[0][1:] <NEW_LINE> for part in parts: <NEW_LINE> <INDENT> lines = part.split('\n', 1) <NEW_LINE> id = lines[0].split()[0] <NEW_LINE> desc = " ".join(lines[0].split()[1:]) <NEW_LINE> yield Sequence(id, desc, lines[1].replace('\n', ''), None) <NEW_LINE> <DEDENT> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> if self.fp is None: <NEW_LINE> <INDENT> raise ValueError("I/O operation on closed FastaReader") <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def __exit__(self, *args): <NEW_LINE> <INDENT> self.fp.close()
Reader for FASTA files.
62599077aad79263cf43013c
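A usage sketch, assuming the FastaReader class above is pasted into the same script and that the module's Sequence record can be stood in for by a namedtuple constructed positionally (field names here are assumptions):

import collections
import io

Sequence = collections.namedtuple("Sequence", "id description sequence qualities")

fasta_text = ">seq1 first record\nACGT\nACGT\n>seq2\nTTTT\n"

with FastaReader(io.StringIO(fasta_text)) as reader:
    records = list(reader)

assert [r.id for r in records] == ["seq1", "seq2"]
assert records[0].description == "first record"
assert records[0].sequence == "ACGTACGT"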
class SectionHeading(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.contents = list() <NEW_LINE> self.style_name = None <NEW_LINE> self.section_number = None <NEW_LINE> self.title = None <NEW_LINE> self.section_points = [] <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'Section: {}: {}, paragraphs: {}'.format(self.section_number, self.title, self.paragraph_numbers) <NEW_LINE> <DEDENT> @property <NEW_LINE> def paragraph_numbers(self): <NEW_LINE> <INDENT> return [p for p in self.contents if isinstance(p, int)] <NEW_LINE> <DEDENT> @property <NEW_LINE> def tables(self): <NEW_LINE> <INDENT> return [p for p in self.contents if isinstance(p, Table)] <NEW_LINE> <DEDENT> def add_contents(self, content): <NEW_LINE> <INDENT> self.contents.append(content) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def create(number, paragraph): <NEW_LINE> <INDENT> section = SectionHeading() <NEW_LINE> section._contents = [number] <NEW_LINE> if paragraph is None: <NEW_LINE> <INDENT> return section <NEW_LINE> <DEDENT> section.style_name = paragraph.style.name <NEW_LINE> try: <NEW_LINE> <INDENT> assert 'heading ' in section.style_name.lower(), 'Heading style not found' <NEW_LINE> if isinstance(paragraph.text, list): <NEW_LINE> <INDENT> heading_txt = ascii_only(" ".join(paragraph.text)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> heading_txt = ascii_only(paragraph.text) <NEW_LINE> <DEDENT> headings = re.findall(r"[^\s']+", heading_txt) <NEW_LINE> assert len(headings) >= 2, "Foreign Heading string format: '{}'".format(paragraph.text) <NEW_LINE> section.section_number = headings[0] <NEW_LINE> section.title = ' '.join(headings[1:]) <NEW_LINE> section.section_points = [pt for pt in section.section_number.split('.')] <NEW_LINE> return section <NEW_LINE> <DEDENT> except Exception as _e: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> def dump(self, prefix=" "): <NEW_LINE> <INDENT> tbls = [t for t in self.contents if isinstance(t, Table)] <NEW_LINE> print('{}Number : {} / pts: {}'.format(prefix, self.section_number, self.section_points)) <NEW_LINE> print('{}# Paragraphs: {}'.format(prefix, len(self.paragraph_numbers))) <NEW_LINE> print('{}# Tables : {}'.format(prefix, len(tbls))) <NEW_LINE> if len(tbls): <NEW_LINE> <INDENT> for tnum, tbl in enumerate(tbls): <NEW_LINE> <INDENT> print('{} List Entry: {}'.format(prefix, tnum)) <NEW_LINE> tbl.dump(prefix=" " + prefix) <NEW_LINE> <DEDENT> print('{}-------------------------------------'.format(prefix))
A section object holds both the title of a given section and the paragraph numbers of the text and table entries within it (until the next section). NOTE: This should not be confused with the docx Section object, which provides page setup and format information.
625990777c178a314d78e8ad
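A small sketch of the bookkeeping described above (no docx objects involved; the section number and paragraph numbers are arbitrary):

section = SectionHeading()
section.section_number = "3.1"
section.title = "Managed entities"
for paragraph_number in (12, 13, 14):
    section.add_contents(paragraph_number)

assert section.paragraph_numbers == [12, 13, 14]
assert str(section) == "Section: 3.1: Managed entities, paragraphs: [12, 13, 14]"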
class Diff(ApplyManyTransform): <NEW_LINE> <INDENT> def __init__(self, order): <NEW_LINE> <INDENT> self.order = order <NEW_LINE> <DEDENT> def get_name(self): <NEW_LINE> <INDENT> return 'diff-%d' % self.order <NEW_LINE> <DEDENT> def apply_one(self, data, meta=None): <NEW_LINE> <INDENT> return np.diff(data, n=self.order, axis=data.ndim-1)
Wrapper for np.diff
6259907771ff763f4b5e9130
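Since apply_one simply delegates to np.diff along the last axis, the effect is easy to illustrate; a sketch assuming the ApplyManyTransform base class is importable:

import numpy as np

data = np.array([[1.0, 4.0, 9.0, 16.0]])      # shape (1, 4): one channel of samples
diff = Diff(order=1)
assert diff.get_name() == "diff-1"
np.testing.assert_array_equal(diff.apply_one(data), np.array([[3.0, 5.0, 7.0]]))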
class IndexESKibana(Command): <NEW_LINE> <INDENT> min_args = max_args = 1 <NEW_LINE> arguments = "<instance id>" <NEW_LINE> indexer_name = None <NEW_LINE> serializer = "IKibanaIndexSerializable" <NEW_LINE> options = [ ( "no-index", { "type": "yn", "default": False, "help": "set to True if you only want to create views", }, ), ( "etypes", { "type": "csv", "default": "", "help": "only index given etypes [default:all indexable types]", }, ), ( "index-name", { "type": "string", "default": "", "help": "use a custom index name rather than the one " "specified in the all-in-one.conf file. ", }, ), ( "chunksize", { "type": "int", "default": 100000, "help": "max number of entities to fetch at once (deafult: 100000)", }, ), ] <NEW_LINE> def run(self, args): <NEW_LINE> <INDENT> appid = args[0] <NEW_LINE> with admincnx(appid) as cnx: <NEW_LINE> <INDENT> indexer = cnx.vreg["es"].select(self.indexer_name, cnx) <NEW_LINE> index_name = self.config.index_name or indexer.index_name <NEW_LINE> print(""""{}" kibana index for {}""".format(index_name, ", ".join(indexer.etypes))) <NEW_LINE> es = indexer.get_connection() <NEW_LINE> if not es and self.config.debug: <NEW_LINE> <INDENT> print("no elasticsearch configuration found, skipping") <NEW_LINE> return <NEW_LINE> <DEDENT> indexer.create_index(index_name) <NEW_LINE> self.update_sql_data(cnx) <NEW_LINE> if self.config.no_index: <NEW_LINE> <INDENT> print("do not index es") <NEW_LINE> return <NEW_LINE> <DEDENT> for _ in parallel_bulk( es, self.bulk_actions(cnx, es, indexer), raise_on_error=False, raise_on_exception=False, ): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def bulk_actions(self, cnx, es, indexer): <NEW_LINE> <INDENT> index_name = self.config.index_name or indexer.index_name <NEW_LINE> etypes = self.config.etypes or indexer.etypes <NEW_LINE> for etype in etypes: <NEW_LINE> <INDENT> nb_entities = cnx.execute("Any COUNT(X) WHERE X is %s" % etype)[0][0] <NEW_LINE> print("\n-> indexing {} {}".format(nb_entities, etype)) <NEW_LINE> progress_bar = _tqdm(total=nb_entities) <NEW_LINE> for idx, entity in enumerate( indexable_entities(cnx, etype, chunksize=self.config.chunksize), 1 ): <NEW_LINE> <INDENT> serializer = entity.cw_adapt_to(self.serializer) <NEW_LINE> json = serializer.serialize(complete=False) <NEW_LINE> if json: <NEW_LINE> <INDENT> data = { "_op_type": "index", "_index": indexer.index_name, "_id": serializer.es_id, "_source": json, } <NEW_LINE> yield data <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> progress_bar.update() <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> cnx.info("[{}] indexed {} {} entities".format(index_name, idx, etype)) <NEW_LINE> <DEDENT> <DEDENT> def update_sql_data(self, cnx): <NEW_LINE> <INDENT> raise NotImplementedError
Create indexes and index data for monitoring in Kibana. <instance id> identifier of the instance
625990777047854f46340d3e
class MvcTemplateLoader(BaseLoader): <NEW_LINE> <INDENT> is_usable = True <NEW_LINE> __view_paths = None <NEW_LINE> def __init__(self, views_path): <NEW_LINE> <INDENT> self.views_path = views_path <NEW_LINE> if MvcTemplateLoader.__view_paths is None: <NEW_LINE> <INDENT> temp_paths = [] <NEW_LINE> for each_path in os.listdir(views_path): <NEW_LINE> <INDENT> if not each_path.startswith('.'): <NEW_LINE> <INDENT> full_path = os.path.join(views_path, each_path) <NEW_LINE> if each_path == "shared": <NEW_LINE> <INDENT> temp_paths.insert(0, full_path) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> temp_paths.append(full_path) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> temp_paths.append(views_path) <NEW_LINE> MvcTemplateLoader.__view_paths = temp_paths <NEW_LINE> <DEDENT> <DEDENT> def get_template_sources(self, template_name): <NEW_LINE> <INDENT> for template_dir in MvcTemplateLoader.__view_paths: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> yield safe_join(template_dir, template_name) <NEW_LINE> <DEDENT> except UnicodeDecodeError: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def load_template_source(self, template_name, template_dirs=None): <NEW_LINE> <INDENT> tried = [] <NEW_LINE> for filepath in self.get_template_sources(template_name): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> file = open(filepath) <NEW_LINE> try: <NEW_LINE> <INDENT> return (file.read().decode(settings.FILE_CHARSET), filepath) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> file.close() <NEW_LINE> <DEDENT> <DEDENT> except IOError: <NEW_LINE> <INDENT> tried.append(filepath) <NEW_LINE> <DEDENT> <DEDENT> error_msg = "Could not find %s in any of the views subdirectories." % template_name <NEW_LINE> raise TemplateDoesNotExist(error_msg) <NEW_LINE> <DEDENT> load_template_source.is_usable = True
A custom template loader for the MVCEngine framework.
62599077f9cc0f698b1c5f8e
class SeqReversible(Sequence): <NEW_LINE> <INDENT> @abstractmethod <NEW_LINE> def _seqtools_reversed(self): <NEW_LINE> <INDENT> raise NotImplementedError
Abstract Base Class for a Sequence that provides its own conversion to a sequence with order reversed, overriding the default behavior of the `Reversed` class. A Sequence class should only inherit from SeqReversible if it can be reversed in some way more efficient than making an explicit copy or iterating over indices in reverse order.
62599077be7bc26dc9252b17
class DummyNode(object): <NEW_LINE> <INDENT> pass
Description Args: arg1: help for arg1
625990774f88993c371f11e3
class Saver(object): <NEW_LINE> <INDENT> def __init__(self, logger: Logger, settings: SettingsNamespace, max_length: int): <NEW_LINE> <INDENT> self.logger = logger <NEW_LINE> self.logger.debug('Creating a saver object') <NEW_LINE> self.settings = settings <NEW_LINE> self.meta_path = constants.MODEL_DIR + self.settings.model_name + '/' + constants.META <NEW_LINE> self.meta = self.load_meta(max_length) <NEW_LINE> <DEDENT> @debug() <NEW_LINE> def load_meta(self, max_length: int) -> MetaInfo: <NEW_LINE> <INDENT> if os.path.isfile(self.meta_path): <NEW_LINE> <INDENT> with open(self.meta_path, 'rb') as meta_file: <NEW_LINE> <INDENT> meta_info = dill.load(meta_file) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> meta_info = MetaInfo(self.settings, max_length) <NEW_LINE> <DEDENT> return meta_info <NEW_LINE> <DEDENT> @debug() <NEW_LINE> def save_meta(self, new_info: list): <NEW_LINE> <INDENT> self.meta.update(new_info) <NEW_LINE> with open(self.meta_path, 'wb') as meta_file: <NEW_LINE> <INDENT> dill.dump(obj=self.meta, file=meta_file) <NEW_LINE> <DEDENT> <DEDENT> @debug() <NEW_LINE> def save_model(self, model, meta_info: MetaInfo, best_weights: bool = False): <NEW_LINE> <INDENT> self.save_meta(meta_info) <NEW_LINE> weights = model.variables.get_weights() <NEW_LINE> run_dir = self.meta.latest()[constants.DIR] <NEW_LINE> if best_weights: <NEW_LINE> <INDENT> with open(run_dir + constants.BEST_WEIGHTS, 'wb') as weights_file: <NEW_LINE> <INDENT> dill.dump(weights, weights_file) <NEW_LINE> <DEDENT> <DEDENT> with open(run_dir + constants.LATEST_WEIGHTS, 'wb') as weights_file: <NEW_LINE> <INDENT> dill.dump(weights, weights_file) <NEW_LINE> <DEDENT> <DEDENT> @debug() <NEW_LINE> def load_model(self, model, best_weights: bool = False): <NEW_LINE> <INDENT> if best_weights: <NEW_LINE> <INDENT> self.logger.info('Loading the weights that produced the best accuracy') <NEW_LINE> weights_path = self.meta.latest()[constants.DIR] + constants.BEST_WEIGHTS <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.logger.info('Loading the latest saved weights') <NEW_LINE> weights_path = self.meta.latest()[constants.DIR] + constants.LATEST_WEIGHTS <NEW_LINE> <DEDENT> if os.path.isfile(weights_path): <NEW_LINE> <INDENT> with open(weights_path, 'rb') as weights_file: <NEW_LINE> <INDENT> weights = dill.load(weights_file) <NEW_LINE> model.variables.set_weights(weights) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.logger.info('Could not load weights: Weights not found')
Class for saving and loading the RNN model.
625990773346ee7daa338322
class ItemStatus(list, _List["OrderItemStatus"]): <NEW_LINE> <INDENT> def __init__(self, data): <NEW_LINE> <INDENT> super().__init__([OrderItemStatus(datum) for datum in data]) <NEW_LINE> self.data = data
Detailed description of items order status.
6259907716aa5153ce401e5f
class TestV1ServiceStatus(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testV1ServiceStatus(self): <NEW_LINE> <INDENT> pass
V1ServiceStatus unit test stubs
625990772c8b7c6e89bd516e
class Solution: <NEW_LINE> <INDENT> def getNum(self, head): <NEW_LINE> <INDENT> sum = 0 <NEW_LINE> tmp = head <NEW_LINE> while tmp: <NEW_LINE> <INDENT> sum = sum * 10 + tmp.val <NEW_LINE> tmp = tmp.next <NEW_LINE> <DEDENT> return sum <NEW_LINE> <DEDENT> def addLists2(self, l1, l2): <NEW_LINE> <INDENT> num1 = self.getNum(l1) <NEW_LINE> num2 = self.getNum(l2) <NEW_LINE> sum = num1 + num2 <NEW_LINE> if sum == 0: <NEW_LINE> <INDENT> return ListNode(0) <NEW_LINE> <DEDENT> dummy = ListNode(-1) <NEW_LINE> while sum > 0: <NEW_LINE> <INDENT> num = sum % 10 <NEW_LINE> nxt = dummy.next <NEW_LINE> dummy.next = ListNode(num) <NEW_LINE> dummy.next.next = nxt <NEW_LINE> sum = sum / 10 <NEW_LINE> <DEDENT> return dummy.next
Given 6->1->7 + 2->9->5. That is, 617 + 295 Return 9->1->2. That is, 912
6259907732920d7e50bc79cd
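A driver for the worked example in the docstring. The minimal ListNode below is assumed rather than taken from the original problem, and the assertions hold under Python 2 semantics, where sum / 10 in addLists2 is integer division (under Python 3 that line would need to be sum // 10):

class ListNode(object):
    def __init__(self, val):
        self.val = val
        self.next = None

def build(digits):
    head = ListNode(digits[0])
    node = head
    for d in digits[1:]:
        node.next = ListNode(d)
        node = node.next
    return head

def digits_of(head):
    out = []
    while head:
        out.append(head.val)
        head = head.next
    return out

result = Solution().addLists2(build([6, 1, 7]), build([2, 9, 5]))   # 617 + 295
assert digits_of(result) == [9, 1, 2]                               # i.e. 912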
class Order_Impl(Default, HasSide, HasPrice, HasVolume, Cancellable): <NEW_LINE> <INDENT> def __init__(self, side, price, volume, owner = None, volumeFilled = 0): <NEW_LINE> <INDENT> self._ticks = None <NEW_LINE> HasSide.__init__(self, side) <NEW_LINE> HasVolume.__init__(self, volume, volumeFilled) <NEW_LINE> Cancellable.__init__(self) <NEW_LINE> Default.__init__(self, owner) <NEW_LINE> HasPrice.__init__(self, price) <NEW_LINE> <DEDENT> def bind_ex(self, ctx): <NEW_LINE> <INDENT> self._bound_ex = True <NEW_LINE> <DEDENT> def reset_ex(self, ctx): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @property <NEW_LINE> def ticks(self): <NEW_LINE> <INDENT> return self._ticks <NEW_LINE> <DEDENT> @ticks.setter <NEW_LINE> def ticks(self, value): <NEW_LINE> <INDENT> self._ticks = value <NEW_LINE> <DEDENT> def copyTo(self, dst): <NEW_LINE> <INDENT> HasSide.copyTo(self, dst) <NEW_LINE> HasVolume.copyTo(self, dst) <NEW_LINE> Cancellable.copyTo(self, dst) <NEW_LINE> HasPrice.copyTo(self, dst) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "%s_%s%s@%s" % (type(self).__name__, HasSide.__str__(self), HasVolume.__str__(self), HasPrice.__str__(self)) <NEW_LINE> <DEDENT> def With(self, side = None, price = None, volume = None): <NEW_LINE> <INDENT> def opt(a,b): <NEW_LINE> <INDENT> return a if b is None else b <NEW_LINE> <DEDENT> return Order_Impl(opt(self.side, side), opt(self.price, price), opt(self.volumeUnmatched, volume)) <NEW_LINE> <DEDENT> def clone(self): <NEW_LINE> <INDENT> return Order_Impl(self.side, self.price, self.volumeUnmatched, self.owner, self.volumeFilled) <NEW_LINE> <DEDENT> def processIn(self, orderBook): <NEW_LINE> <INDENT> orderBook.processLimitOrder(self) <NEW_LINE> <DEDENT> def canBeMatched(self, other): <NEW_LINE> <INDENT> assert other.side == self.side.opposite <NEW_LINE> return not self.side.better(other.price, self.price) <NEW_LINE> <DEDENT> def matchWith(self, other): <NEW_LINE> <INDENT> v = min(self.volumeUnmatched, other.volumeUnmatched) <NEW_LINE> p = self.price <NEW_LINE> pv = (p,v) <NEW_LINE> self.onMatchedWith(p,v) <NEW_LINE> other.onMatchedWith(p,v) <NEW_LINE> return pv <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def Buy(price, volume): return Order_Impl(Side.Buy, price, volume) <NEW_LINE> @staticmethod <NEW_LINE> def Sell(price, volume): return Order_Impl(Side.Sell, price, volume)
Limit order of the given *side*, *price* and *volume*
6259907763b5f9789fe86aea
class CacheEntry(object): <NEW_LINE> <INDENT> __slots__ = [ 'dirty', 'inode', 'blockno', 'last_access', 'size', 'pos', 'fh', 'removed' ] <NEW_LINE> def __init__(self, inode, blockno, filename): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.fh = open(filename, "w+b", 0) <NEW_LINE> self.dirty = False <NEW_LINE> self.inode = inode <NEW_LINE> self.blockno = blockno <NEW_LINE> self.last_access = 0 <NEW_LINE> self.pos = 0 <NEW_LINE> self.size = os.fstat(self.fh.fileno()).st_size <NEW_LINE> <DEDENT> def read(self, size=None): <NEW_LINE> <INDENT> buf = self.fh.read(size) <NEW_LINE> self.pos += len(buf) <NEW_LINE> return buf <NEW_LINE> <DEDENT> def flush(self): <NEW_LINE> <INDENT> self.fh.flush() <NEW_LINE> <DEDENT> def seek(self, off): <NEW_LINE> <INDENT> if self.pos != off: <NEW_LINE> <INDENT> self.fh.seek(off) <NEW_LINE> self.pos = off <NEW_LINE> <DEDENT> <DEDENT> def tell(self): <NEW_LINE> <INDENT> return self.pos <NEW_LINE> <DEDENT> def truncate(self, size=None): <NEW_LINE> <INDENT> self.dirty = True <NEW_LINE> self.fh.truncate(size) <NEW_LINE> if size is None: <NEW_LINE> <INDENT> if self.pos < self.size: <NEW_LINE> <INDENT> self.size = self.pos <NEW_LINE> <DEDENT> <DEDENT> elif size < self.size: <NEW_LINE> <INDENT> self.size = size <NEW_LINE> <DEDENT> <DEDENT> def write(self, buf): <NEW_LINE> <INDENT> self.dirty = True <NEW_LINE> self.fh.write(buf) <NEW_LINE> self.pos += len(buf) <NEW_LINE> self.size = max(self.pos, self.size) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self.fh.close() <NEW_LINE> <DEDENT> def unlink(self): <NEW_LINE> <INDENT> os.unlink(self.fh.name) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return ('<%sCacheEntry, inode=%d, blockno=%d>' % ('Dirty ' if self.dirty else '', self.inode, self.blockno))
An element in the block cache Attributes: ----------- :dirty: entry has been changed since it was last uploaded. :size: current file size :pos: current position in file
625990775166f23b2e244d5c
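A minimal sketch of the entry's bookkeeping using a throwaway file; the inode, block number and contents are illustrative:

import os
import tempfile

with tempfile.TemporaryDirectory() as tmp:
    entry = CacheEntry(inode=4, blockno=0, filename=os.path.join(tmp, "cache-4-0"))

    entry.write(b"hello block")      # marks the entry dirty, tracks pos/size
    assert entry.dirty and entry.size == 11

    entry.seek(0)
    assert entry.read() == b"hello block"

    entry.truncate(5)                # shrinks the tracked size as well
    assert entry.size == 5

    entry.close()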
class CreateOverridesIfReqdForm(BrowserView): <NEW_LINE> <INDENT> def __call__(self): <NEW_LINE> <INDENT> navroot = self.context <NEW_LINE> overridesItem = navroot.get(OVERRIDES_FIXED_ID) <NEW_LINE> if overridesItem is None: <NEW_LINE> <INDENT> overridesItem = createContentInContainer( navroot, 'ftw.logo.ManualOverrides', checkConstraints=False, id=OVERRIDES_FIXED_ID, title='Logo and Icon Overrides', description='Manual overrides for the site logo(s) and icons', ) <NEW_LINE> transaction.get().commit() <NEW_LINE> self.request.response.redirect('{}/@@edit'.format( overridesItem.absolute_url_path() )) <NEW_LINE> return <NEW_LINE> <DEDENT> self.request.response.redirect('{}/view'.format( overridesItem.absolute_url_path() )) <NEW_LINE> return
Create IManualOverrides if it does not exist and redirect to its edit form
625990773317a56b869bf208
class GetEquipmentList(core.TestCase): <NEW_LINE> <INDENT> PREREQUISITES = ["CreateDatabase"] <NEW_LINE> def execute(self): <NEW_LINE> <INDENT> app = self.config.app <NEW_LINE> rv = decoder(app.get('/equipment')) <NEW_LINE> self.info(rv) <NEW_LINE> self.assertTrue(rv[0][1] == 'TestEquipment') <NEW_LINE> self.passed("Passed all assertions.")
Purpose ------- Get the equipment list resource. Pass Criteria ------------- The resource is fetched without error.
6259907756ac1b37e63039a5
class NomenclateException(Exception): <NEW_LINE> <INDENT> pass
Base Nomenclate exception.
62599077cc0a2c111447c794
class ActNorm1d(ActNormNd): <NEW_LINE> <INDENT> def _get_spatial_ndims(self) -> int: <NEW_LINE> <INDENT> return 1
1D convolutional ActNorm flow.
62599077379a373c97d9a9a9
class Employee(DB.Model, Randomizer): <NEW_LINE> <INDENT> id = DB.Column(DB.Integer, primary_key=True) <NEW_LINE> department_id = DB.Column(DB.Integer, DB.ForeignKey( 'department.id', ondelete='CASCADE'), nullable=False) <NEW_LINE> name = DB.Column(DB.String(50), nullable=False) <NEW_LINE> birthdate = DB.Column(DB.Date) <NEW_LINE> salary = DB.Column(DB.Integer, nullable=False) <NEW_LINE> @classmethod <NEW_LINE> def random(cls): <NEW_LINE> <INDENT> fake = Faker() <NEW_LINE> fake.add_provider(date_time) <NEW_LINE> return Employee( name=fake.name(), birthdate=fake.date_between(start_date='-50y', end_date='-18y'), salary=fake.random_int(MIN_SALARY, MAX_SALARY, step=1), department_id=fake.random_element(elements=tuple( d.id for d in Operator.get_all(Department))) ) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return f'<Employee {self.name}>'
ORM representation for 'employee' table in the database. This class is, basically, a relation schema for 'employee' table. An instance of the class represents a row in the table. Attributes: id: A unique identifier for given entity in DB. name: A string corresponding to employee's name. birthdate: A date object representing employee's birthdate. salary: An integer representing employee's salary. department_id: An integer id of the department where employee works.
625990775fdd1c0f98e5f904
class FlavorsAdminNegativeTestJSON(base.BaseV2ComputeAdminTest): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def resource_setup(cls): <NEW_LINE> <INDENT> super(FlavorsAdminNegativeTestJSON, cls).resource_setup() <NEW_LINE> if not test.is_extension_enabled('OS-FLV-EXT-DATA', 'compute'): <NEW_LINE> <INDENT> msg = "OS-FLV-EXT-DATA extension not enabled." <NEW_LINE> raise cls.skipException(msg) <NEW_LINE> <DEDENT> cls.client = cls.os_adm.flavors_client <NEW_LINE> cls.user_client = cls.os.flavors_client <NEW_LINE> cls.flavor_name_prefix = 'test_flavor_' <NEW_LINE> cls.ram = 512 <NEW_LINE> cls.vcpus = 1 <NEW_LINE> cls.disk = 10 <NEW_LINE> cls.ephemeral = 10 <NEW_LINE> cls.swap = 1024 <NEW_LINE> cls.rxtx = 2 <NEW_LINE> <DEDENT> @test.attr(type=['negative', 'gate']) <NEW_LINE> def test_get_flavor_details_for_deleted_flavor(self): <NEW_LINE> <INDENT> flavor_name = data_utils.rand_name(self.flavor_name_prefix) <NEW_LINE> resp, flavor = self.client.create_flavor(flavor_name, self.ram, self.vcpus, self.disk, '', ephemeral=self.ephemeral, swap=self.swap, rxtx=self.rxtx) <NEW_LINE> new_flavor_id = flavor['id'] <NEW_LINE> resp_delete, body = self.client.delete_flavor(new_flavor_id) <NEW_LINE> self.assertEqual(200, resp.status) <NEW_LINE> self.assertEqual(202, resp_delete.status) <NEW_LINE> resp, flavor = self.client.get_flavor_details(new_flavor_id) <NEW_LINE> self.assertEqual(resp.status, 200) <NEW_LINE> self.assertEqual(flavor['name'], flavor_name) <NEW_LINE> resp, flavors = self.client.list_flavors_with_detail() <NEW_LINE> self.assertEqual(resp.status, 200) <NEW_LINE> flag = True <NEW_LINE> for flavor in flavors: <NEW_LINE> <INDENT> if flavor['name'] == flavor_name: <NEW_LINE> <INDENT> flag = False <NEW_LINE> <DEDENT> <DEDENT> self.assertTrue(flag) <NEW_LINE> <DEDENT> @test.attr(type=['negative', 'gate']) <NEW_LINE> def test_create_flavor_as_user(self): <NEW_LINE> <INDENT> flavor_name = data_utils.rand_name(self.flavor_name_prefix) <NEW_LINE> new_flavor_id = str(uuid.uuid4()) <NEW_LINE> self.assertRaises(exceptions.Unauthorized, self.user_client.create_flavor, flavor_name, self.ram, self.vcpus, self.disk, new_flavor_id, ephemeral=self.ephemeral, swap=self.swap, rxtx=self.rxtx) <NEW_LINE> <DEDENT> @test.attr(type=['negative', 'gate']) <NEW_LINE> def test_delete_flavor_as_user(self): <NEW_LINE> <INDENT> self.assertRaises(exceptions.Unauthorized, self.user_client.delete_flavor, self.flavor_ref_alt)
Tests Flavors API Create and Delete that require admin privileges
625990774e4d562566373d88