Columns: code (string, lengths 4-4.48k) | docstring (string, lengths 1-6.45k) | _id (string, length 24)
class URLFetcher: <NEW_LINE> <INDENT> def fetch_urls(self): <NEW_LINE> <INDENT> print("...fetch URLs one day")
Fetch the damned URLs from somewhere
6259907de1aae11d1e7cf53a
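The code column flattens Python source into whitespace-separated tokens, marking line breaks and indentation changes with <NEW_LINE>, <INDENT> and <DEDENT>. A minimal sketch of reflowing such a string back into indented source, assuming four-space indents and that single spaces inside string literals are enough to reconstruct them (not the corpus's official tooling):

def detokenize(flat_code, indent="    "):
    """Rebuild indented source from a <NEW_LINE>/<INDENT>/<DEDENT> string (sketch)."""
    depth, lines, current = 0, [], []
    for tok in flat_code.split():
        if tok == "<NEW_LINE>":
            lines.append(indent * depth + " ".join(current))  # flush the finished line
            current = []
        elif tok == "<INDENT>":
            depth += 1
        elif tok == "<DEDENT>":
            depth -= 1
        else:
            current.append(tok)
    if current:                                               # flush a trailing line with no <NEW_LINE>
        lines.append(indent * depth + " ".join(current))
    return "\n".join(lines)

print(detokenize('class URLFetcher: <NEW_LINE> <INDENT> def fetch_urls(self): '
                 '<NEW_LINE> <INDENT> print("...fetch URLs one day")'))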
class TestMain(unittest.TestCase): <NEW_LINE> <INDENT> def test_communicate(self): <NEW_LINE> <INDENT> app = backcast.main.BackCast() <NEW_LINE> app.lexicaliser = mock.MagicMock() <NEW_LINE> app.aggregator = mock.MagicMock() <NEW_LINE> app.realiser = mock.MagicMock() <NEW_LINE> app.communicate(mock.MagicMock()) <NEW_LINE> assert app.lexicaliser.called
Test that `main` runs and all packages/modules load.
6259907d1b99ca400229025e
class GeonameViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = Geoname.objects.all() <NEW_LINE> serializer_class = GeonameSerializer
API endpoint that allows geonames to be created, viewed and edited.
6259907df548e778e596cfe3
class NoInputOp(Op): <NEW_LINE> <INDENT> __props__ = () <NEW_LINE> def make_node(self): <NEW_LINE> <INDENT> return Apply(self, [], [MyType('test')()]) <NEW_LINE> <DEDENT> def perform(self, node, inputs, output_storage): <NEW_LINE> <INDENT> output_storage[0][0] = 'test Op no input'
An Op to test the corner-case of an Op with no input.
6259907d5fdd1c0f98e5f9d1
class VariableDetail(models.Model): <NEW_LINE> <INDENT> process_instance = models.ForeignKey('camunda.ProcessInstance', on_delete=models.CASCADE, related_name='variables') <NEW_LINE> variable = models.ForeignKey(Variable, related_name='+', on_delete=models.PROTECT) <NEW_LINE> value = models.CharField(max_length=50) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.value
Camunda Variable Instance Model
6259907d56b00c62f0fb4326
class UcStateProperties(object): <NEW_LINE> <INDENT> swagger_types = { 'update_uc_state_properties': 'bool' } <NEW_LINE> attribute_map = { 'update_uc_state_properties': 'update_uc_state_properties' } <NEW_LINE> def __init__(self, update_uc_state_properties=True): <NEW_LINE> <INDENT> self._update_uc_state_properties = None <NEW_LINE> self.discriminator = None <NEW_LINE> if update_uc_state_properties is not None: <NEW_LINE> <INDENT> self.update_uc_state_properties = update_uc_state_properties <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def update_uc_state_properties(self): <NEW_LINE> <INDENT> return self._update_uc_state_properties <NEW_LINE> <DEDENT> @update_uc_state_properties.setter <NEW_LINE> def update_uc_state_properties(self, update_uc_state_properties): <NEW_LINE> <INDENT> self._update_uc_state_properties = update_uc_state_properties <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> if issubclass(UcStateProperties, dict): <NEW_LINE> <INDENT> for key, value in self.items(): <NEW_LINE> <INDENT> result[key] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, UcStateProperties): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
6259907dd486a94d0ba2da09
class ProcessThreadsStarter(Thread): <NEW_LINE> <INDENT> def __init__(self, server, patterns): <NEW_LINE> <INDENT> Thread.__init__(self) <NEW_LINE> self.server = server <NEW_LINE> self.patterns = patterns <NEW_LINE> self.name = "ProcessThreadStarter" <NEW_LINE> self.lock = self.server.lock <NEW_LINE> self.children = [] <NEW_LINE> self.messages = None <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> for t in threading.enumerate(): <NEW_LINE> <INDENT> if t.name == self.name and self != t: <NEW_LINE> <INDENT> logging.info('Previous {0} is still running, current will now close.'.format(self.name)) <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> for i in range(self.server.reconnect_attempts + 1): <NEW_LINE> <INDENT> self.lock.acquire() <NEW_LINE> try: <NEW_LINE> <INDENT> self.server.login_imap() <NEW_LINE> self.messages = self.server.receive_mail() <NEW_LINE> self.server.logout_imap() <NEW_LINE> break <NEW_LINE> <DEDENT> except SMTPServerDisconnected as e: <NEW_LINE> <INDENT> if i == self.server.reconnect_attempts: <NEW_LINE> <INDENT> logging.critical('Could not connect to the IMAP server!') <NEW_LINE> raise ShutdownException(10) <NEW_LINE> <DEDENT> time.sleep(30) <NEW_LINE> continue <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> self.lock.release() <NEW_LINE> <DEDENT> <DEDENT> if len(self.messages) == 0: <NEW_LINE> <INDENT> logging.info("No instructions were received!") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logging.info("{0} instructions were received!".format(len(self.messages))) <NEW_LINE> <DEDENT> for message in self.messages: <NEW_LINE> <INDENT> new_thread = MessageProcessThread(message, self.patterns, self.server, self.lock) <NEW_LINE> new_thread.start() <NEW_LINE> self.children.append(new_thread) <NEW_LINE> <DEDENT> for f in self.children: <NEW_LINE> <INDENT> if f.is_alive(): <NEW_LINE> <INDENT> f.join() <NEW_LINE> <DEDENT> <DEDENT> if len(self.server.unsent_emails) > 0: <NEW_LINE> <INDENT> self.lock.acquire() <NEW_LINE> try: <NEW_LINE> <INDENT> self.server.login_smtp() <NEW_LINE> for e in self.server.unsent_emails: <NEW_LINE> <INDENT> self.server.send_email(e) <NEW_LINE> <DEDENT> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> functions.save_emails_to_file(self.server.unsent_emails, self.server.unsent_save_location, e) <NEW_LINE> raise ShutdownException(12) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> self.lock.release()
Thread that starts a MessageProcessThread for each email, preventing the main program flow from blocking.
6259907d23849d37ff852b0b
class Guarderia(object): <NEW_LINE> <INDENT> def __init__(self, nombre, usuarios, estacionamiento, sys_def_fac): <NEW_LINE> <INDENT> self.nombre = nombre <NEW_LINE> self.usuarios = usuarios <NEW_LINE> self.estacionamiento = estacionamiento <NEW_LINE> self.sys_def_fac = sys_def_fac
docstring for Guarderia.
6259907d091ae35668706691
class EnRDRTree(PosTaggingRDRTree): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.root = None <NEW_LINE> <DEDENT> def tagRawSentence(self, DICT, rawLine): <NEW_LINE> <INDENT> line = EnInitTagger4Sentence(DICT, rawLine) <NEW_LINE> sen = '' <NEW_LINE> startWordTags = line.split() <NEW_LINE> for i in xrange(len(startWordTags)): <NEW_LINE> <INDENT> fwObject = FWObject.getFWObject(startWordTags, i) <NEW_LINE> word, tag = getWordTag(startWordTags[i]) <NEW_LINE> node = self.findFiredNode(fwObject) <NEW_LINE> sen += node.conclusion + " " <NEW_LINE> <DEDENT> return sen.strip() <NEW_LINE> <DEDENT> def tagRawCorpus(self, DICT, inputStr): <NEW_LINE> <INDENT> outStr = ""; <NEW_LINE> outStr += self.tagRawSentence(DICT, inputStr) + "\n"; <NEW_LINE> outList = outStr.split(); <NEW_LINE> return outList;
RDRPOSTagger for English
6259907d3d592f4c4edbc888
class Command(BaseCommand): <NEW_LINE> <INDENT> help = 'Convert Ooyala IDs to corresponding Brightcove IDs in Xblock and embeds' <NEW_LINE> batch_size = 100 <NEW_LINE> def add_arguments(self, parser): <NEW_LINE> <INDENT> parser.add_argument( "--user-id", dest="user_id", help="Staff User ID", ), <NEW_LINE> parser.add_argument( "--course-ids", dest="course_ids", help="Course IDs to process Ooyala instances in", ), <NEW_LINE> parser.add_argument( "--revert", dest="revert", action="store_true", default=False, help="Revert all the converted Ids back to previous state" ), <NEW_LINE> <DEDENT> def handle(self, *args, **options): <NEW_LINE> <INDENT> course_ids = options.get('course_ids') <NEW_LINE> user_id = options.get('user_id') <NEW_LINE> revert = options.get('revert') <NEW_LINE> if not user_id: <NEW_LINE> <INDENT> raise CommandError("--user-id parameter is missing. Please provide a staff user id") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> User.objects.get(id=user_id) <NEW_LINE> <DEDENT> except User.DoesNotExist: <NEW_LINE> <INDENT> raise CommandError("Invalid user id: {}. Please provide a valid staff user id".format(user_id)) <NEW_LINE> <DEDENT> <DEDENT> if course_ids: <NEW_LINE> <INDENT> course_ids = course_ids.split(',') <NEW_LINE> logger.info('Ooyala IDs update task queued for Courses: {}'.format(course_ids)) <NEW_LINE> convert_ooyala_to_bcove.delay( staff_user_id=user_id, course_ids=course_ids, revert=revert, callback="conversion_script_success_callback", ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> open_courses = CourseOverview.objects.filter( Q(end__gte=datetime.datetime.today().replace(tzinfo=UTC)) | Q(end__isnull=True) ).values_list('id', flat=True) <NEW_LINE> logger.info('Ooyala IDs update command: queuing task for {} Open Courses'.format(len(open_courses))) <NEW_LINE> for course_ids in self.chunks(open_courses, self.batch_size): <NEW_LINE> <INDENT> convert_ooyala_to_bcove.delay( staff_user_id=user_id, course_ids=course_ids, revert=revert, callback="conversion_script_success_callback", ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def chunks(self, l, n): <NEW_LINE> <INDENT> for i in range(0, len(l), n): <NEW_LINE> <INDENT> yield l[i:i + n]
Command to update Ooyala Xblock Content IDs to corresponding Brightcove IDs
6259907df9cc0f698b1c5ff6
class Case(models.Model): <NEW_LINE> <INDENT> location = models.CharField(_('location'), max_length=100, blank=True, null=True, ) <NEW_LINE> city = models.CharField(_('city'), max_length=100, blank=True, null=True, ) <NEW_LINE> state = models.CharField(_('state'), max_length=50, blank=True, null=True, ) <NEW_LINE> zipcode = models.CharField(_('zipcode'), max_length=15, blank=True, null=True, ) <NEW_LINE> geopin = models.CharField(_('geopin'), max_length=15, blank=True, null=True, help_text=_('The geopin, which should match the geopin field in the ' 'parcel database'), ) <NEW_LINE> geoaddress = models.CharField(_('geoaddress'), max_length=100, blank=True, null=True, help_text=_("This seems incorrect, but we'll hold onto it"), ) <NEW_LINE> geom = models.PointField(_('geom'), help_text=_('The point given by xpos and ypos'), ) <NEW_LINE> caseid = models.CharField(_('caseid'), max_length=15, ) <NEW_LINE> caseno = models.CharField(_('caseno'), max_length=15, ) <NEW_LINE> o_c = models.CharField(_('o_c'), max_length=15, help_text=_('open / closed'), ) <NEW_LINE> stage = models.CharField(_('stage'), max_length=150, ) <NEW_LINE> statdate = models.DateTimeField(_('statdate'), blank=True, null=True, ) <NEW_LINE> keystatus = models.TextField(_('keystatus'), blank=True, null=True, ) <NEW_LINE> initinspection = models.DateTimeField(_('initinspection'), blank=True, null=True, ) <NEW_LINE> initinspresult = models.CharField(_('stage'), max_length=150, blank=True, null=True, ) <NEW_LINE> prevhearingdate = models.DateTimeField(_('prevhearingdate'), blank=True, null=True, ) <NEW_LINE> prevhearingresult = models.CharField(_('stage'), max_length=150, blank=True, null=True, ) <NEW_LINE> casefiled = models.DateTimeField(_('casefiled'), blank=True, null=True, ) <NEW_LINE> lastupload = models.DateTimeField(_('lastupload'), blank=True, null=True, ) <NEW_LINE> objects = models.GeoManager()
A Code Enforcement case as represented in "CE Active Pipeline": https://data.nola.gov/dataset/CE-Active-Pipeline/8pqz-ftzc
6259907d4428ac0f6e659f82
class MnliProcessor(DataProcessor): <NEW_LINE> <INDENT> def get_train_examples(self, data_dir): <NEW_LINE> <INDENT> return self._create_examples( self._read_tsv(os.path.join(data_dir, "train.tsv")), "train") <NEW_LINE> <DEDENT> def get_dev_examples(self, data_dir, eval_set="MNLI-m"): <NEW_LINE> <INDENT> if eval_set is None or eval_set == "MNLI-m": <NEW_LINE> <INDENT> return self._create_examples( self._read_tsv(os.path.join(data_dir, "dev_matched.tsv")), "dev") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> assert eval_set == "MNLI-mm" <NEW_LINE> return self._create_examples( self._read_tsv(os.path.join(data_dir, "dev_mismatched.tsv")), "dev") <NEW_LINE> <DEDENT> <DEDENT> def get_test_examples(self, data_dir, eval_set="MNLI-m"): <NEW_LINE> <INDENT> if eval_set is None or eval_set == "MNLI-m": <NEW_LINE> <INDENT> return self._create_examples( self._read_tsv(os.path.join(data_dir, "test_matched.tsv")), "test") <NEW_LINE> <DEDENT> elif eval_set == "MNLI-mm": <NEW_LINE> <INDENT> return self._create_examples( self._read_tsv(os.path.join(data_dir, "test_mismatched.tsv")), "test") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> assert eval_set == "AX" <NEW_LINE> return self._create_examples( self._read_tsv(os.path.join(data_dir, "ax.tsv")), "ax") <NEW_LINE> <DEDENT> <DEDENT> def get_labels(self): <NEW_LINE> <INDENT> return ["contradiction", "entailment", "neutral"] <NEW_LINE> <DEDENT> def _create_examples(self, lines, set_type): <NEW_LINE> <INDENT> examples = [] <NEW_LINE> for (i, line) in enumerate(lines): <NEW_LINE> <INDENT> if i == 0: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> guid = "%s-%s" % (set_type, line[0]) <NEW_LINE> if set_type == "ax": <NEW_LINE> <INDENT> text_a = line[1] <NEW_LINE> text_b = line[2] <NEW_LINE> label = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> text_a = line[8] <NEW_LINE> text_b = line[9] <NEW_LINE> label = None if set_type == "test" else line[-1] <NEW_LINE> <DEDENT> examples.append( InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label)) <NEW_LINE> <DEDENT> return examples
Processor for the MultiNLI data set (GLUE version).
6259907daad79263cf43020d
class ConditionSet(Set): <NEW_LINE> <INDENT> def __new__(cls, condition, base_set): <NEW_LINE> <INDENT> return Basic.__new__(cls, condition, base_set) <NEW_LINE> <DEDENT> condition = property(lambda self: self.args[0]) <NEW_LINE> base_set = property(lambda self: self.args[1]) <NEW_LINE> def contains(self, other): <NEW_LINE> <INDENT> return And(self.condition(other), self.base_set.contains(other))
Set of elements which satisfies a given condition. {x | condition(x) is True for x in S} Examples ======== >>> from sympy import Symbol, S, ConditionSet, Lambda, pi, Eq, sin, Interval >>> x = Symbol('x') >>> sin_sols = ConditionSet(Lambda(x, Eq(sin(x), 0)), Interval(0, 2*pi)) >>> 2*pi in sin_sols True >>> pi/2 in sin_sols False >>> 3*pi in sin_sols False >>> 5 in ConditionSet(Lambda(x, x**2 > 4), S.Reals) True
6259907d3617ad0b5ee07ba1
class RankForms(messages.Message): <NEW_LINE> <INDENT> items = messages.MessageField(RankForm, 1, repeated=True)
Users, sorted by lowest average guesses
6259907d3346ee7daa33838b
class Or(object): <NEW_LINE> <INDENT> def __init__(self, *predicates): <NEW_LINE> <INDENT> self.predicates = predicates <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'Or(%s)' % ', '.join(str(p) for p in self.predicates) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<hunter.predicates.Or: predicates=%r>' % (self.predicates,) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return ( isinstance(other, Or) and self.predicates == other.predicates ) <NEW_LINE> <DEDENT> def __call__(self, event): <NEW_LINE> <INDENT> for predicate in self.predicates: <NEW_LINE> <INDENT> if predicate(event): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def __or__(self, other): <NEW_LINE> <INDENT> return Or(*chain(self.predicates, other.predicates if isinstance(other, Or) else (other,))) <NEW_LINE> <DEDENT> def __and__(self, other): <NEW_LINE> <INDENT> return And(self, other) <NEW_LINE> <DEDENT> def __invert__(self): <NEW_LINE> <INDENT> return Not(self) <NEW_LINE> <DEDENT> def __ror__(self, other): <NEW_LINE> <INDENT> return Or(other, *self.predicates) <NEW_LINE> <DEDENT> def __rand__(self, other): <NEW_LINE> <INDENT> return And(other, self)
Logical disjunction. Returns ``True`` after the first sub-predicate that returns ``True``.
6259907d55399d3f05627f68
class MainData: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.action=dict() <NEW_LINE> self.canvas=dict() <NEW_LINE> self.control={"QLabel": [], "QTabWidget": [], "QPushButton": [], "QTextEdit": [], "QRadioButton": [], "QComboBox": [], "QSpinBox": [], "QTableWidget": [], "QLCDNumber": [], "Clickable_Lable":[],"QProgressBar":[],"RedBall":[]} <NEW_LINE> self.controlData=dict() <NEW_LINE> <DEDENT> def controlClear(self): <NEW_LINE> <INDENT> self.control = {"QLabel": [], "QTabWidget": [], "QPushButton": [], "QTextEdit": [], "QRadioButton": [], "QComboBox": [], "QSpinBox": [], "QTableWidget": [], "QLCDNumber": [], "Clickable_Lable":[],"QProgressBar":[],"RedBall":[]} <NEW_LINE> <DEDENT> def addFrame(self, imageName): <NEW_LINE> <INDENT> self.controlData[imageName] = dict() <NEW_LINE> self.controlData[imageName]["QRadioButton"] = {"isChecked": []} <NEW_LINE> self.controlData[imageName]["QComboBox"] = {"itemText": [], "currentIndex": []} <NEW_LINE> self.controlData[imageName]["QSpinBox"] = {"value": []} <NEW_LINE> self.controlData[imageName]["QTableWidget"] = {"data": []} <NEW_LINE> self.controlData[imageName]["QLCDNumber"] = {"value": []} <NEW_LINE> self.controlData[imageName]["save"] = [] <NEW_LINE> <DEDENT> def controlDataClear(self, imageName): <NEW_LINE> <INDENT> self.controlData[imageName]["QRadioButton"]["isChecked"] = [] <NEW_LINE> self.controlData[imageName]["QComboBox"]["itemText"] = [] <NEW_LINE> self.controlData[imageName]["QComboBox"]["currentIndex"] = [] <NEW_LINE> self.controlData[imageName]["QSpinBox"]["value"] = [] <NEW_LINE> self.controlData[imageName]["QTableWidget"]["data"] = [] <NEW_LINE> self.controlData[imageName]["QLCDNumber"]["value"] = [] <NEW_LINE> self.controlData[imageName]["save"] = []
Per-frame page data for the PyQt5 UI (actions, canvases, controls and control data).
6259907d1f5feb6acb16464c
class UpdateMirror1Test(BaseTest): <NEW_LINE> <INDENT> sortOutput = True <NEW_LINE> longTest = False <NEW_LINE> fixtureCmds = [ "aptly -architectures=i386,amd64 mirror create --ignore-signatures varnish https://packagecloud.io/varnishcache/varnish30/debian/ wheezy main", ] <NEW_LINE> runCmd = "aptly mirror update --ignore-signatures varnish" <NEW_LINE> outputMatchPrepare = filterOutRedirects
update mirrors: regular update
6259907d099cdd3c63676123
class Neuron(ComponentBase): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> ncomp = kwargs.pop('ncomp', 0) <NEW_LINE> presyn = kwargs.pop('presyn', 0) <NEW_LINE> ComponentBase.__init__(self, *args, **kwargs) <NEW_LINE> self.attrs['ontology'] = 'cno_0000020' <NEW_LINE> self.presyn = presyn <NEW_LINE> self.compartments = [] <NEW_LINE> for ii in range(ncomp): <NEW_LINE> <INDENT> child = Compartment('compartment_{}'.format(ii), parent=self) <NEW_LINE> self.compartments.append(child)
Base class for all the cell types.
6259907d5166f23b2e244e2c
class virtualChannelOpen_result: <NEW_LINE> <INDENT> thrift_spec = ( (0, TType.STRUCT, 'success', (TReturnVirtualChannelOpen, TReturnVirtualChannelOpen.thrift_spec), None, ), ) <NEW_LINE> def __init__(self, success=None,): <NEW_LINE> <INDENT> self.success = success <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 0: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.success = TReturnVirtualChannelOpen() <NEW_LINE> self.success.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('virtualChannelOpen_result') <NEW_LINE> if self.success is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('success', TType.STRUCT, 0) <NEW_LINE> self.success.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other)
Attributes: - success
6259907d60cbc95b06365a97
class Heap(Sequence): <NEW_LINE> <INDENT> def __init__(self, init_data=None, max_heap=False): <NEW_LINE> <INDENT> self._vals = [] <NEW_LINE> self._is_min_heap = not max_heap <NEW_LINE> if init_data is not None: <NEW_LINE> <INDENT> self.extend(init_data) <NEW_LINE> <DEDENT> <DEDENT> def __contains__(self, value): <NEW_LINE> <INDENT> return value in self._vals <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self._vals) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self._vals) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<Heap ' + repr(self._vals) + '>' <NEW_LINE> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> if index >= len(self): <NEW_LINE> <INDENT> raise IndexError("index out of range") <NEW_LINE> <DEDENT> return self._vals[index] <NEW_LINE> <DEDENT> def clear(self): <NEW_LINE> <INDENT> self._vals = [] <NEW_LINE> <DEDENT> def copy(self): <NEW_LINE> <INDENT> new = Heap() <NEW_LINE> new._vals = self._vals[:] <NEW_LINE> new._is_min_heap = self._is_min_heap <NEW_LINE> return new <NEW_LINE> <DEDENT> def pop(self): <NEW_LINE> <INDENT> if not self: <NEW_LINE> <INDENT> raise IndexError("peek/pop from an empty container") <NEW_LINE> <DEDENT> top_val = self.peek() <NEW_LINE> last_idx = len(self) - 1 <NEW_LINE> self._swap(0, last_idx) <NEW_LINE> self._vals.pop() <NEW_LINE> if self: <NEW_LINE> <INDENT> down_heap(self, 0) <NEW_LINE> <DEDENT> return top_val <NEW_LINE> <DEDENT> def peek(self): <NEW_LINE> <INDENT> if not self: <NEW_LINE> <INDENT> raise IndexError("peek/pop from an empty container") <NEW_LINE> <DEDENT> return self[0] <NEW_LINE> <DEDENT> def push(self, value): <NEW_LINE> <INDENT> self._vals.append(value) <NEW_LINE> idx = len(self) - 1 <NEW_LINE> up_heap(self, idx) <NEW_LINE> <DEDENT> def poppush(self, value): <NEW_LINE> <INDENT> top_val = self.peek() <NEW_LINE> self._vals[0] = value <NEW_LINE> down_heap(self, 0) <NEW_LINE> return top_val <NEW_LINE> <DEDENT> def pushpop(self, value): <NEW_LINE> <INDENT> top_val = self.peek() <NEW_LINE> if self._upper_eq(value, top_val): <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> self._vals[0] = value <NEW_LINE> down_heap(self, 0) <NEW_LINE> return top_val <NEW_LINE> <DEDENT> def extend(self, iterable): <NEW_LINE> <INDENT> self._vals.extend(iterable) <NEW_LINE> len_half = len(self) // 2 <NEW_LINE> for idx in reversed(xrange(len_half)): <NEW_LINE> <INDENT> down_heap(self, idx) <NEW_LINE> <DEDENT> <DEDENT> def _idx2val(self, idx): <NEW_LINE> <INDENT> return self[idx] <NEW_LINE> <DEDENT> def _upper_eq(self, i, j): <NEW_LINE> <INDENT> if self._is_min_heap: <NEW_LINE> <INDENT> return i <= j <NEW_LINE> <DEDENT> return i >= j <NEW_LINE> <DEDENT> def _swap(self, i1, i2): <NEW_LINE> <INDENT> self._vals[i1], self._vals[i2] = self[i2], self[i1]
A basic heap data structure. This is a binary heap. Heap is a subclass of collections.Sequence, having an interface similar to the Python built-in list. Usage: >>> heap = Heap(max_heap=True) # create empty max-heap >>> heap = Heap() # create empty min-heap >>> heap.push(10) # insert value into the heap >>> heap.extend([0, 7]) # insert values from iterable >>> heap.peek() # Get the current top element 0 >>> heap.pop() # pop and return the current top element 0 >>> heap.peek() # Get the current top element 7
6259907d5fdd1c0f98e5f9d3
class MicroExitPolicy(ExitPolicy): <NEW_LINE> <INDENT> def __init__(self, policy): <NEW_LINE> <INDENT> self._policy = policy <NEW_LINE> if policy.startswith('accept'): <NEW_LINE> <INDENT> self.is_accept = True <NEW_LINE> <DEDENT> elif policy.startswith('reject'): <NEW_LINE> <INDENT> self.is_accept = False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("A microdescriptor exit policy must start with either 'accept' or 'reject': %s" % policy) <NEW_LINE> <DEDENT> policy = policy[6:] <NEW_LINE> if not policy.startswith(' ') or (len(policy) - 1 != len(policy.lstrip())): <NEW_LINE> <INDENT> raise ValueError('A microdescriptor exit policy should have a space separating accept/reject from its port list: %s' % self._policy) <NEW_LINE> <DEDENT> policy = policy[1:] <NEW_LINE> rules = [] <NEW_LINE> for port_entry in policy.split(','): <NEW_LINE> <INDENT> if '-' in port_entry: <NEW_LINE> <INDENT> min_port, max_port = port_entry.split('-', 1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> min_port = max_port = port_entry <NEW_LINE> <DEDENT> if not stem.util.connection.is_valid_port(min_port) or not stem.util.connection.is_valid_port(max_port): <NEW_LINE> <INDENT> raise ValueError("'%s' is an invalid port range" % port_entry) <NEW_LINE> <DEDENT> rules.append(MicroExitPolicyRule(self.is_accept, int(min_port), int(max_port))) <NEW_LINE> <DEDENT> super(MicroExitPolicy, self).__init__(*rules) <NEW_LINE> self._is_allowed_default = not self.is_accept <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self._policy <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash(str(self)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if isinstance(other, MicroExitPolicy): <NEW_LINE> <INDENT> return str(self) == str(other) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False
Exit policy provided by the microdescriptors. This is a distilled version of a normal :class:`~stem.exit_policy.ExitPolicy`, consisting of just a list of ports that are either accepted or rejected. For instance... :: accept 80,443 # only accepts common http ports reject 1-1024 # only accepts non-privileged ports Since these policies are a subset of the exit policy information (lacking IP ranges), clients can only use them to guess if a relay will accept traffic or not. To quote the `dir-spec <https://gitweb.torproject.org/torspec.git/tree/dir-spec.txt>`_ (section 3.2.1)... :: With microdescriptors, clients don't learn exact exit policies: clients can only guess whether a relay accepts their request, try the BEGIN request, and might get end-reason-exit-policy if they guessed wrong, in which case they'll have to try elsewhere. :var bool is_accept: **True** if these are ports that we accept, **False** if they're ports that we reject :param str policy: policy string that describes this policy
6259907d2c8b7c6e89bd5239
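A hedged usage sketch of the port-list policy format described above, assuming the stem package is installed and exposes this class as stem.exit_policy.MicroExitPolicy (its documented location); can_exit_to comes from the parent ExitPolicy:

# illustrative only; requires `pip install stem`
from stem.exit_policy import MicroExitPolicy

policy = MicroExitPolicy("accept 80,443")
print(policy.is_accept)               # True
print(policy.can_exit_to(port=443))   # True, 443 is in the accepted port list
print(policy.can_exit_to(port=22))    # False, anything else falls back to reject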
class DeploymentSimulationArtifacts(object): <NEW_LINE> <INDENT> def __init__(self, resource_cache: Dict, available_resources: KubernetesAvailableResourceTypes): <NEW_LINE> <INDENT> self._resource_cache: Dict = resource_cache <NEW_LINE> self._available_resources: KubernetesAvailableResourceTypes = available_resources <NEW_LINE> <DEDENT> def api_resource_types(self) -> KubernetesAvailableResourceTypes: <NEW_LINE> <INDENT> return self._available_resources <NEW_LINE> <DEDENT> def resource_cache(self) -> Dict: <NEW_LINE> <INDENT> return self._resource_cache
Simple class defining all the artifacts required for testing Viya deployment report utilities against simulated deployments with a mocked ingress controller.
6259907d92d797404e389886
class SPM(six.with_metaclass(parsers.OptionParserMeta, parsers.OptionParser, parsers.ConfigDirMixIn, parsers.LogLevelMixIn, parsers.MergeConfigMixIn)): <NEW_LINE> <INDENT> VERSION = salt.version.__version__ <NEW_LINE> _config_filename_ = 'spm' <NEW_LINE> _default_logging_logfile_ = os.path.join(syspaths.LOGS_DIR, 'spm') <NEW_LINE> def setup_config(self): <NEW_LINE> <INDENT> return salt.config.spm_config(self.get_config_file_path()) <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> import salt.client.spm <NEW_LINE> self.parse_args() <NEW_LINE> self.setup_logfile_logger() <NEW_LINE> client = salt.client.spm.SPMClient(self.config) <NEW_LINE> client.run(self.args)
The cli parser object used to fire up the salt spm system.
6259907d7d43ff248742813f
class TestCommand(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testCommand(self): <NEW_LINE> <INDENT> pass
Command unit test stubs
6259907d3317a56b869bf270
class GbFullCnnKimUniversity(gb_full_university.GbFullUniversity): <NEW_LINE> <INDENT> def __init__(self, model_dir, workspace, dataset_wkspc, text_window=TextWindow.beginning, starting_idx=0): <NEW_LINE> <INDENT> super().__init__(model_dir, workspace, dataset_wkspc, text_window=text_window, starting_idx=starting_idx) <NEW_LINE> <DEDENT> def get_model(self, graph): <NEW_LINE> <INDENT> pretrained = None <NEW_LINE> if FLAGS.use_pretrained_embeddings: <NEW_LINE> <INDENT> pretrained = input_util.get_pretrained_vectors(self.vocab, FLAGS.pretrained_embedding_model, FLAGS.embedding_size) <NEW_LINE> <DEDENT> return cnn_kim.CnnKim(len(self.vocab), len(self.subjects), FLAGS.document_length, pretrained) <NEW_LINE> <DEDENT> def perform_training_run(self, session, model, training_batch): <NEW_LINE> <INDENT> feed_dict = { model.input_x: training_batch[1], model.input_y: training_batch[0], model.dropout_keep_prob: FLAGS.dropout} <NEW_LINE> _, summary, step, batch_loss, batch_accuracy, batch_targets, batch_predictions = session.run( [model.train_op, model.summary_op, model.global_step, model.loss, model.accuracy, model.targets, model.predictions], feed_dict) <NEW_LINE> return summary, step, batch_loss, batch_accuracy, batch_targets, batch_predictions <NEW_LINE> <DEDENT> def perform_evaluation_run(self, session, model, eval_batch): <NEW_LINE> <INDENT> feed_dict = { model.input_x: eval_batch[1], model.input_y: eval_batch[0], model.dropout_keep_prob: 1} <NEW_LINE> summary, batch_loss, batch_accuracy, batch_targets, batch_predictions = session.run( [model.summary_op, model.loss, model.accuracy, model.targets, model.predictions], feed_dict) <NEW_LINE> return summary, batch_loss, batch_accuracy, batch_targets, batch_predictions
This is an AI Lit university for training CNN-Kim on the Gutenberg Full text dataset.
6259907d32920d7e50bc7a96
class Exit(Exception): <NEW_LINE> <INDENT> def __init__(self, code): <NEW_LINE> <INDENT> self.code = code
Use an exit exception to end program execution. We don't use sys.exit because it is a little problematic with RPython.
6259907d4f6381625f19a1d8
class Impressions(ListView): <NEW_LINE> <INDENT> queryset = Impression.objects.all() <NEW_LINE> template_name = 'impressions_all.html'
View for all impression images.
6259907d4a966d76dd5f093b
class Test_osmotic_pitzer(unittest.TestCase, pyEQL.CustomAssertions): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.s1 = pyEQL.Solution([["Na+", "0.1 mol/L"], ["Cl-", "0.1 mol/L"]]) <NEW_LINE> self.tol = 0.05 <NEW_LINE> <DEDENT> def test_osmotic_pitzer_coeff_units(self): <NEW_LINE> <INDENT> result = self.s1.get_osmotic_coefficient().dimensionality <NEW_LINE> self.assertEqual(result, "") <NEW_LINE> <DEDENT> def test_activity_pitzer_magnitude(self): <NEW_LINE> <INDENT> result = self.s1.get_osmotic_coefficient() <NEW_LINE> self.assertGreaterEqual(result, 0) <NEW_LINE> <DEDENT> def test_osmotic_pitzer_ammoniumnitrate(self): <NEW_LINE> <INDENT> conc_list = [0.25, 0.5, 0.75, 1, 1.5, 2] <NEW_LINE> pub_osmotic_coeff = [0.86, 0.855, 0.83, 0.825, 0.80, 0.78] <NEW_LINE> for i in range(len(conc_list)): <NEW_LINE> <INDENT> with self.subTest(conc=conc_list[i]): <NEW_LINE> <INDENT> conc = str(conc_list[i]) + "mol/kg" <NEW_LINE> sol = pyEQL.Solution() <NEW_LINE> sol.add_solute("NH4+", conc) <NEW_LINE> sol.add_solute("NO3-", conc) <NEW_LINE> result = sol.get_osmotic_coefficient() <NEW_LINE> expected = pub_osmotic_coeff[i] <NEW_LINE> self.assertWithinExperimentalError(result, expected, self.tol) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def test_osmotic_pitzer_coppersulfate(self): <NEW_LINE> <INDENT> conc_list = [0.25, 0.5, 0.75, 1] <NEW_LINE> pub_osmotic_coeff = [0.5, 0.485, 0.48, 0.485, 0.5] <NEW_LINE> for i in range(len(conc_list)): <NEW_LINE> <INDENT> with self.subTest(conc=conc_list[i]): <NEW_LINE> <INDENT> conc = str(conc_list[i]) + "mol/kg" <NEW_LINE> sol = pyEQL.Solution() <NEW_LINE> sol.add_solute("Cu+2", conc) <NEW_LINE> sol.add_solute("SO4-2", conc) <NEW_LINE> result = sol.get_osmotic_coefficient() <NEW_LINE> expected = pub_osmotic_coeff[i] <NEW_LINE> self.assertWithinExperimentalError(result, expected, self.tol)
test osmotic coefficient based on the Pitzer model
6259907da8370b77170f1e25
class check_user_admins_weak_password(): <NEW_LINE> <INDENT> TITLE = 'Admins Weak Password' <NEW_LINE> CATEGORY = 'Configuration' <NEW_LINE> TYPE = 'nosql' <NEW_LINE> SQL = None <NEW_LINE> verbose = False <NEW_LINE> skip = False <NEW_LINE> result = {} <NEW_LINE> db = None <NEW_LINE> def do_check(self): <NEW_LINE> <INDENT> password_file = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..', 'etc', 'check_user_weak_password.txt') <NEW_LINE> self.result['level'] = 'GREEN' <NEW_LINE> output = '' <NEW_LINE> match = False <NEW_LINE> try: <NEW_LINE> <INDENT> admins = self.db.config()['admins'] <NEW_LINE> for admin in admins: <NEW_LINE> <INDENT> admin_hash = admins[admin] <NEW_LINE> admin_hash_parts = admin_hash.split(',') <NEW_LINE> salt = admin_hash_parts[1] <NEW_LINE> with open(str(password_file), 'r') as passwords: <NEW_LINE> <INDENT> for password in passwords: <NEW_LINE> <INDENT> if '-hashed-' + sha1(password.strip() + salt).hexdigest() + ',' + salt == admin_hash: <NEW_LINE> <INDENT> match = True <NEW_LINE> output += '%s\n' % (admin) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> self.result['level'] = 'ORANGE' <NEW_LINE> output = 'Problem accessing config, message:\n%s\n' % (e) <NEW_LINE> <DEDENT> if match: <NEW_LINE> <INDENT> self.result['level'] = 'RED' <NEW_LINE> output = 'Weak password found for:\n %s' % (output) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.result['level'] = 'GREEN' <NEW_LINE> output = 'No weak password found.' <NEW_LINE> <DEDENT> self.result['output'] = output <NEW_LINE> return self.result <NEW_LINE> <DEDENT> def __init__(self, parent): <NEW_LINE> <INDENT> print('Performing check: ' + self.TITLE) <NEW_LINE> self.db = parent.db
check_user_admins_weak_password: Admin users with weak passwords.
6259907d26068e7796d4e394
class Date(models.Model): <NEW_LINE> <INDENT> title = models.CharField("Titel", max_length=128, blank=False) <NEW_LINE> start = models.DateField("Beginn", null=True, blank=True) <NEW_LINE> end = models.DateField("Ende", null=True, blank=True) <NEW_LINE> location = models.CharField("Ort", max_length=128, blank=True) <NEW_LINE> latitude = models.FloatField("Breitengrad", max_length=128, blank=True, null=True) <NEW_LINE> longitude = models.FloatField("Längengrad", max_length=128, blank=True, null=True) <NEW_LINE> display_name = models.CharField("Berechneter Standort", max_length=512, blank=True, null=True) <NEW_LINE> description = models.TextField("Beschreibung", blank=True) <NEW_LINE> host = models.CharField("Ausrichter", max_length=128, blank=True) <NEW_LINE> attachment = models.FileField(upload_to="termine/", verbose_name="Anhang", null=True, blank=True) <NEW_LINE> created = models.DateTimeField("Erstellt am", auto_now_add=True) <NEW_LINE> def clean(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> data = requests.get("https://nominatim.openstreetmap.org/search?q=" + str(self.location) + "&format=json&polygon=1&addressdetails=1").json()[0] <NEW_LINE> self.latitude = data["lat"] <NEW_LINE> self.longitude = data["lon"] <NEW_LINE> self.display_name = data["display_name"] <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> self.latitude = None <NEW_LINE> self.longitude = None <NEW_LINE> self.display_name = None <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.title <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_absolute_url(): <NEW_LINE> <INDENT> return reverse('intern:dates') <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = "Termin" <NEW_LINE> verbose_name_plural = "Termine"
Important dates
6259907d283ffb24f3cf52f5
class RBFVariational(ShiftInvariant): <NEW_LINE> <INDENT> def __init__(self, lenscale=None, learn_lenscale=False, seed=0): <NEW_LINE> <INDENT> self.lenscale_post = None <NEW_LINE> super().__init__(lenscale, learn_lenscale, seed) <NEW_LINE> <DEDENT> def weights(self, input_dim, n_features, dtype=np.float32): <NEW_LINE> <INDENT> self.lenscale, self.lenscale_post = _init_lenscale(self.given_lenscale, self.learn_lenscale, input_dim) <NEW_LINE> dim = (input_dim, n_features) <NEW_LINE> pP_scale = self.__len2std(self.lenscale, n_features) <NEW_LINE> pP = tf.distributions.Normal( loc=tf.zeros(dim), scale=pP_scale) <NEW_LINE> qP_scale = 1.0 / self.lenscale_post <NEW_LINE> if qP_scale.ndim > 0: <NEW_LINE> <INDENT> qP_scale = np.repeat(qP_scale[:, np.newaxis], n_features, axis=1) <NEW_LINE> <DEDENT> qP = norm_posterior(dim=dim, std0=qP_scale, suffix="kernel") <NEW_LINE> KL = kl_sum(qP, pP) <NEW_LINE> e = self._random_state.randn(*dim).astype(dtype) <NEW_LINE> P = qP.mean() + qP.stddev() * e <NEW_LINE> return P, KL <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def __len2std(lenscale, n_features): <NEW_LINE> <INDENT> std = tf.tile(1.0 / tf.expand_dims(lenscale, axis=-1), (1, n_features)) <NEW_LINE> return std
Variational Radial basis kernel approximation. This kernel is similar to the RBF kernel, however we learn an independant Gaussian posterior distribution over the kernel weights to sample from. Parameters ---------- lenscale : float, ndarray, optional The length scales of the RBF kernel. This can be a scalar for an isotropic kernel, or a vector of shape (input_dim,) for an automatic relevance detection (ARD) kernel. If not provided, it will be set to ``sqrt(1 / input_dim)`` (this is similar to the 'auto' setting for a scikit learn SVM with a RBF kernel). If learn_lenscale is True, lenscale will be the initial value of the prior precision of the Fourier weight distribution. learn_lenscale : bool, optional Whether to learn the (prior) length scale. If True, the lenscale value provided (or its default) is used for initialisation. seed : int, optional The seed for the internal random number generator. Setting a fixed seed ensures that remaking the tensorflow graph results in the same weights.
6259907daad79263cf43020f
class BatchSystemJob: <NEW_LINE> <INDENT> def __init__(self, job_id, b_name, iter_nr, has_instrumentation, cube_file, item_id, build, benchmark, flavor, max_iter): <NEW_LINE> <INDENT> self.job_id = job_id <NEW_LINE> self.benchmark_name = b_name <NEW_LINE> self.iter_nr = iter_nr <NEW_LINE> self.has_instrumentation = has_instrumentation <NEW_LINE> self.cube_file = cube_file <NEW_LINE> self.item_id = item_id <NEW_LINE> self.build = build <NEW_LINE> self.benchmark = benchmark <NEW_LINE> self.flavor = flavor <NEW_LINE> self.max_iterations = max_iter <NEW_LINE> <DEDENT> def is_instrumented(self): <NEW_LINE> <INDENT> return self.has_instrumentation <NEW_LINE> <DEDENT> def is_first_iteration(self): <NEW_LINE> <INDENT> return self.iter_nr == 0 <NEW_LINE> <DEDENT> def is_iteration(self, number): <NEW_LINE> <INDENT> return self.iter_nr == number <NEW_LINE> <DEDENT> def is_last_iteration(self): <NEW_LINE> <INDENT> return self.iter_nr == self.max_iterations <NEW_LINE> <DEDENT> def get_job_id(self): <NEW_LINE> <INDENT> return self.job_id <NEW_LINE> <DEDENT> def get_benchmark_name(self): <NEW_LINE> <INDENT> return self.benchmark_name <NEW_LINE> <DEDENT> def get_iteration_number(self): <NEW_LINE> <INDENT> return self.iter_nr <NEW_LINE> <DEDENT> def get_cube_file_path(self): <NEW_LINE> <INDENT> return self.cube_file <NEW_LINE> <DEDENT> def get_item_id(self): <NEW_LINE> <INDENT> return self.item_id <NEW_LINE> <DEDENT> def get_build(self): <NEW_LINE> <INDENT> return self.build <NEW_LINE> <DEDENT> def get_benchmark(self): <NEW_LINE> <INDENT> return self.benchmark <NEW_LINE> <DEDENT> def get_flavor(self): <NEW_LINE> <INDENT> return self.flavor
Class holding the description of a batch system job. This class should be independent of the actually used batch system, but still supply enough information for the automation process.
6259907dfff4ab517ebcf26e
class LookupGhcSymbolCmd(gdb.Command): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(LookupGhcSymbolCmd, self).__init__ ("ghc symbol", gdb.COMMAND_USER) <NEW_LINE> <DEDENT> def invoke(self, args, from_tty): <NEW_LINE> <INDENT> addr = int(gdb.parse_and_eval(args)) <NEW_LINE> foundAddr, sym = getLinkerSymbols().lookupNearestSymbol(addr) <NEW_LINE> print("%d bytes into %s (starts at 0x%x)" % (addr - foundAddr, sym, foundAddr))
Look up the symbol an address falls within (assuming the symbol was loaded by the RTS linker)
6259907d442bda511e95da82
class BasePairTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def test_base_pair(self): <NEW_LINE> <INDENT> strand = 'A A T G C C T A T G G C' <NEW_LINE> mirror = 'T T A C G G A T A C C G' <NEW_LINE> self.assertEqual(base_pair(strand), mirror)
Tests 'prob_207_easy.py'.
6259907d4f6381625f19a1d9
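The test above pins down the behaviour of base_pair: each DNA base is swapped for its Watson-Crick complement (A<->T, G<->C) while other characters such as spaces pass through. A minimal sketch of an implementation that satisfies the test (the real prob_207_easy.py may differ):

def base_pair(strand):
    # complement each base, leaving any other character (e.g. spaces) untouched
    complement = {"A": "T", "T": "A", "G": "C", "C": "G"}
    return "".join(complement.get(ch, ch) for ch in strand)

assert base_pair("A A T G C C T A T G G C") == "T T A C G G A T A C C G"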
class BeastSense(Spell): <NEW_LINE> <INDENT> name = "Beast Sense" <NEW_LINE> level = 2 <NEW_LINE> casting_time = "1 action" <NEW_LINE> casting_range = "Touch" <NEW_LINE> components = ('S',) <NEW_LINE> materials = """""" <NEW_LINE> duration = "Concentration, up to 1 hour" <NEW_LINE> ritual = True <NEW_LINE> magic_school = "Divination" <NEW_LINE> classes = ('Druid', 'Ranger')
You touch a willing beast. For the duration of the spell, you can use your action to see through the beast’s eyes and hear what it hears, and continue to do so until you use your action to return to your normal senses.
6259907d55399d3f05627f6c
@base.ReleaseTracks(base.ReleaseTrack.ALPHA, base.ReleaseTrack.BETA, base.ReleaseTrack.GA) <NEW_LINE> class ServiceManagement(base.Group): <NEW_LINE> <INDENT> def Filter(self, context, args): <NEW_LINE> <INDENT> context['servicemanagement-v1'] = apis.GetClientInstance( 'servicemanagement', 'v1') <NEW_LINE> context['servicemanagement-v1-messages'] = apis.GetMessagesModule( 'servicemanagement', 'v1') <NEW_LINE> context['apikeys-v1'] = apis.GetClientInstance('apikeys', 'v1') <NEW_LINE> context['apikeys-v1-messages'] = apis.GetMessagesModule( 'apikeys', 'v1') <NEW_LINE> return context
Create, enable and manage API services. Google Service Management is an infrastructure service of Google Cloud Platform that manages other APIs and services, including Google's own Cloud Platform services and their APIs, and services created using Google Cloud Endpoints. More information on Service Management can be found here: https://cloud.google.com/service-management and detailed documentation can be found here: https://cloud.google.com/service-management/docs/
6259907d442bda511e95da83
class OSABIEnum(Enum): <NEW_LINE> <INDENT> ELFOSABI_NONE = 0 <NEW_LINE> ELFOSABI_HPUX = 1 <NEW_LINE> ELFOSABI_NETBSD = 2 <NEW_LINE> ELFOSABI_GNU = 3 <NEW_LINE> ELFOSABI_LINUX = 3 <NEW_LINE> ELFOSABI_SOLARIS = 6 <NEW_LINE> ELFOSABI_AIX = 7 <NEW_LINE> ELFOSABI_IRIX = 8 <NEW_LINE> ELFOSABI_FREEBSD = 9 <NEW_LINE> ELFOSABI_TRU64 = 10 <NEW_LINE> ELFOSABI_MODESTO = 11 <NEW_LINE> ELFOSABI_OPENBSD = 12 <NEW_LINE> ELFOSABI_OPENVMS = 13 <NEW_LINE> ELFOSABI_NSK = 14 <NEW_LINE> ELFOSABI_AROS = 15 <NEW_LINE> ELFOSABI_ARM = 97 <NEW_LINE> ELFOSABI_STANDALONE = 255 <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return { OSABIEnum.ELFOSABI_NONE: 'UNIX - System V', OSABIEnum.ELFOSABI_HPUX: 'UNIX - HP-UX', OSABIEnum.ELFOSABI_NETBSD: 'UNIX - NetBSD', OSABIEnum.ELFOSABI_GNU: 'UNIX - GNU', OSABIEnum.ELFOSABI_LINUX: 'UNIX - GNU', OSABIEnum.ELFOSABI_SOLARIS: 'UNIX - Solaris', OSABIEnum.ELFOSABI_AIX: 'UNIX - AIX', OSABIEnum.ELFOSABI_IRIX: 'UNIX - IRIX', OSABIEnum.ELFOSABI_FREEBSD: 'UNIX - FreeBSD', OSABIEnum.ELFOSABI_TRU64: 'UNIX - TRU64', OSABIEnum.ELFOSABI_MODESTO: 'Novell - Modesto', OSABIEnum.ELFOSABI_OPENBSD: 'UNIX - OpenBSD', OSABIEnum.ELFOSABI_OPENVMS: 'VMS - OpenVMS', OSABIEnum.ELFOSABI_NSK: 'HP - Non-Stop Kernel', OSABIEnum.ELFOSABI_AROS: 'AROS', OSABIEnum.ELFOSABI_ARM: 'ARM', OSABIEnum.ELFOSABI_STANDALONE: 'Standalone App', }.get(self, "<unknown: %x>" % self.value)
The valid values found in Ehdr e_ident[EI_OSABI].
6259907d2c8b7c6e89bd523d
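A small sketch of where these values come from: EI_OSABI is byte 7 of the 16-byte e_ident array at the start of every ELF file. It assumes the OSABIEnum class above is defined in (or importable into) the same module, and uses /bin/ls purely as an example path:

# illustrative only; any ELF binary works in place of /bin/ls
with open("/bin/ls", "rb") as f:
    e_ident = f.read(16)

assert e_ident[:4] == b"\x7fELF", "not an ELF file"
print(OSABIEnum(e_ident[7]))   # byte 7 is EI_OSABI; typically 'UNIX - System V' on Linux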
class Chulius(object): <NEW_LINE> <INDENT> def __init__(self, julius='julius', conf='', grammar='', target_score=0): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.target_score = target_score <NEW_LINE> self._server = JuliusServer(julius, conf, grammar) <NEW_LINE> self._server.start() <NEW_LINE> <DEDENT> def recognize(self): <NEW_LINE> <INDENT> result = self._server.recognize() <NEW_LINE> if min(result['score']) > self.target_score: <NEW_LINE> <INDENT> return result['sentence'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise RecognitionError('Score is lower than target_score.') <NEW_LINE> <DEDENT> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self._server.stop() <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> pass
A class for speech recognition using Julius; only recognitions whose score exceeds target_score are accepted.
6259907d56b00c62f0fb432c
class RingLight(RingEntityMixin, LightEntity): <NEW_LINE> <INDENT> def __init__(self, config_entry_id, device): <NEW_LINE> <INDENT> super().__init__(config_entry_id, device) <NEW_LINE> self._unique_id = device.id <NEW_LINE> self._light_on = device.lights == ON_STATE <NEW_LINE> self._no_updates_until = dt_util.utcnow() <NEW_LINE> <DEDENT> @callback <NEW_LINE> def _update_callback(self): <NEW_LINE> <INDENT> if self._no_updates_until > dt_util.utcnow(): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self._light_on = self._device.lights == ON_STATE <NEW_LINE> self.async_write_ha_state() <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return f"{self._device.name} light" <NEW_LINE> <DEDENT> @property <NEW_LINE> def unique_id(self): <NEW_LINE> <INDENT> return self._unique_id <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_on(self): <NEW_LINE> <INDENT> return self._light_on <NEW_LINE> <DEDENT> def _set_light(self, new_state): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self._device.lights = new_state <NEW_LINE> <DEDENT> except requests.Timeout: <NEW_LINE> <INDENT> _LOGGER.error("Time out setting %s light to %s", self.entity_id, new_state) <NEW_LINE> return <NEW_LINE> <DEDENT> self._light_on = new_state == ON_STATE <NEW_LINE> self._no_updates_until = dt_util.utcnow() + SKIP_UPDATES_DELAY <NEW_LINE> self.async_write_ha_state() <NEW_LINE> <DEDENT> def turn_on(self, **kwargs): <NEW_LINE> <INDENT> self._set_light(ON_STATE) <NEW_LINE> <DEDENT> def turn_off(self, **kwargs): <NEW_LINE> <INDENT> self._set_light(OFF_STATE)
Creates a switch to turn the Ring camera's light on and off.
6259907d656771135c48ad5c
class ChangeStd(): <NEW_LINE> <INDENT> def __init__(self, std): <NEW_LINE> <INDENT> self.std = std <NEW_LINE> <DEDENT> @varargin <NEW_LINE> def __call__(self, x): <NEW_LINE> <INDENT> x_std = torch.std(x.view(len(x), -1), dim=-1) <NEW_LINE> fixed_std = x * (self.std / (x_std + 1e-9)).view(len(x), *[1, ] * (x.dim() - 1)) <NEW_LINE> return fixed_std
Change the standard deviation of input. Arguments: std (float or tensor): Desired std. If tensor, it should be the same length as x.
6259907daad79263cf430212
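A short standalone sketch of the rescaling that ChangeStd.__call__ performs, written without the varargin decorator: each sample in the batch is divided by its own standard deviation and multiplied by the desired one.

import torch

x = torch.randn(8, 3, 32, 32) * 5.0                          # batch whose per-sample std is ~5
target_std = 1.0
x_std = x.view(len(x), -1).std(dim=-1)                        # std over everything but the batch dim
y = x * (target_std / (x_std + 1e-9)).view(len(x), 1, 1, 1)   # broadcast the per-sample scale
print(y.view(len(y), -1).std(dim=-1))                         # ~1.0 for every sample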
class ThetaLocator(mticker.Locator): <NEW_LINE> <INDENT> def __init__(self, base): <NEW_LINE> <INDENT> self.base = base <NEW_LINE> self.axis = self.base.axis = _AxisWrapper(self.base.axis) <NEW_LINE> <DEDENT> def set_axis(self, axis): <NEW_LINE> <INDENT> self.axis = _AxisWrapper(axis) <NEW_LINE> self.base.set_axis(self.axis) <NEW_LINE> <DEDENT> def __call__(self): <NEW_LINE> <INDENT> lim = self.axis.get_view_interval() <NEW_LINE> if _is_full_circle_deg(lim[0], lim[1]): <NEW_LINE> <INDENT> return np.arange(8) * 2 * np.pi / 8 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return np.deg2rad(self.base()) <NEW_LINE> <DEDENT> <DEDENT> @cbook.deprecated("3.2") <NEW_LINE> def autoscale(self): <NEW_LINE> <INDENT> return self.base.autoscale() <NEW_LINE> <DEDENT> def pan(self, numsteps): <NEW_LINE> <INDENT> return self.base.pan(numsteps) <NEW_LINE> <DEDENT> def refresh(self): <NEW_LINE> <INDENT> return self.base.refresh() <NEW_LINE> <DEDENT> def view_limits(self, vmin, vmax): <NEW_LINE> <INDENT> vmin, vmax = np.rad2deg((vmin, vmax)) <NEW_LINE> return np.deg2rad(self.base.view_limits(vmin, vmax)) <NEW_LINE> <DEDENT> def zoom(self, direction): <NEW_LINE> <INDENT> return self.base.zoom(direction)
Used to locate theta ticks. This will work the same as the base locator except in the case that the view spans the entire circle. In such cases, the previously used default locations of every 45 degrees are returned.
6259907d167d2b6e312b82c0
class CoursesItemRESTHandler(utils.BaseRESTHandler): <NEW_LINE> <INDENT> COPY_SAMPLE_COURSE_HOOKS = [] <NEW_LINE> URI = '/rest/courses/item' <NEW_LINE> XSRF_ACTION = 'add-course-put' <NEW_LINE> def put(self): <NEW_LINE> <INDENT> request = transforms.loads(self.request.get('request')) <NEW_LINE> if not self.assert_xsrf_token_or_fail( request, self.XSRF_ACTION, {}): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if not CoursesPropertyRights.can_add(): <NEW_LINE> <INDENT> transforms.send_json_response( self, 401, 'Access denied.') <NEW_LINE> return <NEW_LINE> <DEDENT> payload = request.get('payload') <NEW_LINE> json_object = transforms.loads(payload) <NEW_LINE> name = json_object.get('name') <NEW_LINE> title = json_object.get('title') <NEW_LINE> admin_email = json_object.get('admin_email') <NEW_LINE> template_course = json_object.get('template_course') <NEW_LINE> errors = [] <NEW_LINE> entry = sites.add_new_course_entry(name, title, admin_email, errors) <NEW_LINE> if not entry and not errors: <NEW_LINE> <INDENT> errors.append('Error adding a new course entry.') <NEW_LINE> <DEDENT> if errors: <NEW_LINE> <INDENT> transforms.send_json_response(self, 412, '\n'.join(errors)) <NEW_LINE> return <NEW_LINE> <DEDENT> app_context = sites.get_all_courses(entry)[0] <NEW_LINE> new_course = courses.Course(None, app_context=app_context) <NEW_LINE> if not new_course.init_new_course_settings(title, admin_email): <NEW_LINE> <INDENT> transforms.send_json_response( self, 412, 'Added new course entry, but failed to update title and/or ' 'admin email. The course.yaml file already exists and must be ' 'updated manually.') <NEW_LINE> return <NEW_LINE> <DEDENT> if template_course: <NEW_LINE> <INDENT> if template_course != 'sample': <NEW_LINE> <INDENT> transforms.send_json_response( self, 412, 'Unknown template course: %s' % template_course) <NEW_LINE> return <NEW_LINE> <DEDENT> errors = [] <NEW_LINE> src_app_context = sites.get_all_courses('course:/:/:')[0] <NEW_LINE> new_course.import_from(src_app_context, errors) <NEW_LINE> new_course.save() <NEW_LINE> if not errors: <NEW_LINE> <INDENT> common_utils.run_hooks( self.COPY_SAMPLE_COURSE_HOOKS, app_context, errors) <NEW_LINE> <DEDENT> if errors: <NEW_LINE> <INDENT> transforms.send_json_response(self, 412, '\n'.join(errors)) <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> transforms.send_json_response( self, 200, 'Added.', {'entry': entry})
Provides REST API for course entries.
6259907d23849d37ff852b11
class _FileProxyMixin(object): <NEW_LINE> <INDENT> def __getattr__(self, attr): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return getattr(self.file, attr) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> raise AttributeError( '%s object has no attribute "%s".' % ( self.__class__.__name__, attr)) <NEW_LINE> <DEDENT> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self.file)
Proxy methods from an underlying file.
6259907d5fc7496912d48f97
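A small sketch of the delegation pattern above: any attribute not found on the wrapper is looked up on self.file, so the wrapper behaves like the file it holds. It assumes the _FileProxyMixin class above is in scope; InMemoryUpload is a hypothetical wrapper used only for illustration.

import io

class InMemoryUpload(_FileProxyMixin):
    def __init__(self, file):
        self.file = file            # the proxied file object

f = InMemoryUpload(io.StringIO("hello\nworld\n"))
print(f.read())                     # read() is not defined here, so it is proxied to the StringIO
print(list(InMemoryUpload(io.StringIO("a\nb\n"))))   # ['a\n', 'b\n'] via __iter__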
class LayeredListsTests(unittest.TestCase): <NEW_LINE> <INDENT> def testOneLayerNoDepth(self): <NEW_LINE> <INDENT> a = [1, [2], 3] <NEW_LINE> b = [4, [5], 6] <NEW_LINE> expected = ([1, [2], 3], [4, [5], 6]) <NEW_LINE> self.assertEqual(flatten_two_lists(a, b, 0), expected) <NEW_LINE> <DEDENT> def testOneLayerOneDepth(self): <NEW_LINE> <INDENT> a = [1, [2], 3] <NEW_LINE> b = [4, [5], 6] <NEW_LINE> expected = ([1, 2, 3], [4, 5, 6]) <NEW_LINE> self.assertEqual(flatten_two_lists(a, b, 1), expected) <NEW_LINE> <DEDENT> def testOneLayerRandomDepth(self): <NEW_LINE> <INDENT> a = [1, [2], 3] <NEW_LINE> b = [4, [5], 6] <NEW_LINE> expected = ([1, 2, 3], [4, 5, 6]) <NEW_LINE> self.assertEqual(flatten_two_lists(a, b, 157869), expected) <NEW_LINE> <DEDENT> def testOneLayerNegativeDepth(self): <NEW_LINE> <INDENT> a = [1, [2], 3] <NEW_LINE> b = [4, [5], 6] <NEW_LINE> expected = ([1, [2], 3], [4, [5], 6]) <NEW_LINE> self.assertEqual(flatten_two_lists(a, b, -577), expected) <NEW_LINE> <DEDENT> def testMultiLayerNoDepth(self): <NEW_LINE> <INDENT> a = [1, [2, [3, 4], [5]], 6, [7]] <NEW_LINE> b = [8, [[9], []], []] <NEW_LINE> expected = ([1, [2, [3, 4], [5]], 6, [7]], [8, [[9], []], []]) <NEW_LINE> self.assertEqual(flatten_two_lists(a, b, 0), expected) <NEW_LINE> <DEDENT> def testMultiLayerOneDepth(self): <NEW_LINE> <INDENT> a = [1, [2, [3, 4], [5]], 6, [7]] <NEW_LINE> b = [8, [[9], []], []] <NEW_LINE> expected = ([1, 2, [3, 4], [5], 6, 7], [8, [9], []]) <NEW_LINE> self.assertEqual(flatten_two_lists(a, b, 1), expected) <NEW_LINE> <DEDENT> def testMultiLayerRandomDepth(self): <NEW_LINE> <INDENT> a = [1, [2, [3, 4], [5]], 6, [7]] <NEW_LINE> b = [8, [[9], []], []] <NEW_LINE> expected = ([1, 2, 3, 4, 5, 6, 7], [8, 9]) <NEW_LINE> self.assertEqual(flatten_two_lists(a, b, 849633), expected) <NEW_LINE> <DEDENT> def testMultiLayerNegativeDepth(self): <NEW_LINE> <INDENT> a = [1, [2, [3, 4], [5]], 6, [7]] <NEW_LINE> b = [8, [[9], []], []] <NEW_LINE> expected = ([1, [2, [3, 4], [5]], 6, [7]], [8, [[9], []], []]) <NEW_LINE> self.assertEqual(flatten_two_lists(a, b, -561), expected)
Layered lists with various elements and depths.
6259907d5fcc89381b266e88
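The tests above fully specify flatten_two_lists(a, b, depth): flatten each list by one nesting level per unit of depth, stop early once nothing is nested, and leave the inputs untouched for depth <= 0. A minimal sketch consistent with those expectations (the module under test may be implemented differently):

def _flatten_once(lst):
    # remove exactly one level of nesting
    out = []
    for item in lst:
        if isinstance(item, list):
            out.extend(item)
        else:
            out.append(item)
    return out

def flatten_two_lists(a, b, depth):
    # flatten both lists by `depth` levels; zero or negative depth is a no-op
    for _ in range(max(depth, 0)):
        if not any(isinstance(i, list) for i in a + b):
            break                      # already flat, so huge depths finish early
        a, b = _flatten_once(a), _flatten_once(b)
    return a, b

assert flatten_two_lists([1, [2], 3], [4, [5], 6], 1) == ([1, 2, 3], [4, 5, 6])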
class ActorFuture(object): <NEW_LINE> <INDENT> def __init__(self, q, io_loop): <NEW_LINE> <INDENT> self.q = q <NEW_LINE> self.io_loop = io_loop <NEW_LINE> <DEDENT> def result(self, timeout=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self._cached_result <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> self._cached_result = self.q.get(timeout=timeout) <NEW_LINE> return self._cached_result <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<ActorFuture>"
Future to an actor's method call Whenever you call a method on an Actor you get an ActorFuture immediately while the computation happens in the background. You can call ``.result`` to block and collect the full result See Also -------- Actor
6259907da8370b77170f1e29
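A tiny usage sketch of the class above outside a real dask cluster: the result is pulled from the queue once, then served from the cache on every later call. It assumes ActorFuture as defined in that row; io_loop is not needed by result(), so None stands in.

from queue import Queue

q = Queue()
fut = ActorFuture(q, io_loop=None)
q.put(42)                   # the "actor" delivers its answer onto the queue
print(fut.result())         # 42, fetched from the queue (blocks until available)
print(fut.result())         # 42 again, now served from the cached value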
class Resource(object): <NEW_LINE> <INDENT> def __init__(self, name, ops): <NEW_LINE> <INDENT> log.debug(u"Building resource '%s'", name) <NEW_LINE> self.name = name <NEW_LINE> self.operations = ops <NEW_LINE> <DEDENT> def __deepcopy__(self, memo=None): <NEW_LINE> <INDENT> if memo is None: <NEW_LINE> <INDENT> memo = {} <NEW_LINE> <DEDENT> return self.__class__( name=deepcopy(self.name, memo=memo), ops=deepcopy(self.operations, memo=memo), ) <NEW_LINE> <DEDENT> def __getstate__(self): <NEW_LINE> <INDENT> return self.__dict__ <NEW_LINE> <DEDENT> def __setstate__(self, state): <NEW_LINE> <INDENT> self.__dict__.clear() <NEW_LINE> self.__dict__.update(state) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> repr = u"{self.__class__.__name__}({self.name})".format(self=self) <NEW_LINE> if PY2: <NEW_LINE> <INDENT> return repr.encode('ascii', 'backslashreplace') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return repr <NEW_LINE> <DEDENT> <DEDENT> def __getattr__(self, item): <NEW_LINE> <INDENT> op = self.operations.get(item) <NEW_LINE> if not op: <NEW_LINE> <INDENT> raise AttributeError(u"Resource '{0}' has no operation '{1}'".format(self.name, item)) <NEW_LINE> <DEDENT> return op <NEW_LINE> <DEDENT> def __dir__(self): <NEW_LINE> <INDENT> return self.operations.keys() <NEW_LINE> <DEDENT> def is_equal(self, other, ignore_swagger_spec=False): <NEW_LINE> <INDENT> if id(self) == id(other): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if not isinstance(other, self.__class__): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if self.name != other.name: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> for operation_id in set(chain(iterkeys(self.operations), iterkeys(other.operations))): <NEW_LINE> <INDENT> operation = self.operations.get(operation_id) <NEW_LINE> if operation is None or not operation.is_equal( other.operations.get(operation_id), ignore_swagger_spec=ignore_swagger_spec, ): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True
A Swagger resource is associated with multiple operations. :param name: resource name :type name: str :param ops: operations associated with this resource (by tag) :type ops: dict where (key, value) = (op_name, Operation)
6259907d7d847024c075de36
class Taggable(object): <NEW_LINE> <INDENT> tags_attr = None <NEW_LINE> force_lower_case = True <NEW_LINE> def add_tags(self, names): <NEW_LINE> <INDENT> existing_tags = getattr(self, self.__class__.tags_attr, []) <NEW_LINE> if self.force_lower_case: <NEW_LINE> <INDENT> names = names.lower() <NEW_LINE> <DEDENT> name_list = names.replace(",", " ").replace(";", " ").split() <NEW_LINE> for name in name_list: <NEW_LINE> <INDENT> tag = Tag.get_or_create(category=self.__class__.__name__, name=name) <NEW_LINE> if name not in existing_tags: <NEW_LINE> <INDENT> tag.count += 1 <NEW_LINE> tag.save() <NEW_LINE> existing_tags.append(name) <NEW_LINE> <DEDENT> <DEDENT> setattr(self, self.__class__.tags_attr, existing_tags) <NEW_LINE> <DEDENT> def remove_tags(self, names): <NEW_LINE> <INDENT> existing_tags = getattr(self, self.__class__.tags_attr, []) <NEW_LINE> if self.force_lower_case: <NEW_LINE> <INDENT> names = names.lower() <NEW_LINE> <DEDENT> name_list = names.replace(",", " ").replace(";", " ").split() <NEW_LINE> for name in name_list: <NEW_LINE> <INDENT> if name in existing_tags: <NEW_LINE> <INDENT> while name in existing_tags: <NEW_LINE> <INDENT> existing_tags.remove(name) <NEW_LINE> <DEDENT> tag = Tag.get_unique(category=self.__class__.__name__, name=name) <NEW_LINE> if tag: <NEW_LINE> <INDENT> tag.count -= 1 <NEW_LINE> if tag.count > 0: <NEW_LINE> <INDENT> tag.save() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> tag.delete() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> setattr(self, self.__class__.tags_attr, existing_tags) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def find_by_tag(cls, name, **kwargs): <NEW_LINE> <INDENT> filter_kwargs = {cls.tags_attr: name} <NEW_LINE> query = cls.objects.filter(**filter_kwargs) <NEW_LINE> if "order_by" in kwargs: <NEW_LINE> <INDENT> query = query.order_by(kwargs["order_by"]) <NEW_LINE> <DEDENT> if kwargs.get("cursor"): <NEW_LINE> <INDENT> query.with_cursor(kwargs["cursor"]) <NEW_LINE> <DEDENT> if kwargs.get("limit"): <NEW_LINE> <INDENT> query.set_limit(kwargs["limit"]) <NEW_LINE> <DEDENT> return query
Taggable mixin class. Requirements for sub-classes are: - must have a class attribute 'tags_attr', which is the attribute name of tags. - self.save(): saves the instance to database.
6259907d26068e7796d4e398
class OperationMetadata(_messages.Message): <NEW_LINE> <INDENT> createTime = _messages.StringField(1) <NEW_LINE> endTime = _messages.StringField(2)
OperationMetadata will be used and required as metadata for all operations that created by Container Analysis Providers Fields: createTime: Output only. The time this operation was created. endTime: Output only. The time that this operation was marked completed or failed.
6259907d55399d3f05627f6e
class Processor(AwardProcessor): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> AwardProcessor.__init__(self, 'Nomad', 'Longest Avg. Distance Between Kills', [PLAYER_COL, Column('Meters', Column.NUMBER, Column.DESC)]) <NEW_LINE> self.player_to_pos = dict() <NEW_LINE> self.distance = collections.Counter() <NEW_LINE> self.kills = collections.Counter() <NEW_LINE> <DEDENT> def on_kill(self, e): <NEW_LINE> <INDENT> if not e.valid_kill: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if not e.attacker in self.player_to_pos: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> vehicle = model_mgr.get_vehicle(e.attacker.vehicle_id) <NEW_LINE> if vehicle.group == AIR: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> last_pos = self.player_to_pos[e.attacker] <NEW_LINE> distance = stat_mgr.dist_3d(last_pos, e.attacker_pos) <NEW_LINE> self.distance[e.attacker] += round(distance) <NEW_LINE> self.kills[e.attacker] += 1 <NEW_LINE> self.results[e.attacker] = round(self.distance[e.attacker] / self.kills[e.attacker]) <NEW_LINE> self.player_to_pos[e.attacker] = e.attacker_pos <NEW_LINE> <DEDENT> def on_spawn(self, e): <NEW_LINE> <INDENT> self.player_to_pos[e.player] = e.player_pos
Overview This processor tracks the longest average distance travelled between kills. Implementation This implementation tracks the distance travelled between kills (the distance between the prior kill and the current kill) and divides the total by the kill count. The fact that you ran around like a madman for 5 minutes doing laps around the map doesn't help you out in this case. Notes Only kills that occur on the ground are counted.
6259907dfff4ab517ebcf272
class NextHopResult(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'next_hop_type': {'key': 'nextHopType', 'type': 'str'}, 'next_hop_ip_address': {'key': 'nextHopIpAddress', 'type': 'str'}, 'route_table_id': {'key': 'routeTableId', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, next_hop_type: Optional[Union[str, "NextHopType"]] = None, next_hop_ip_address: Optional[str] = None, route_table_id: Optional[str] = None, **kwargs ): <NEW_LINE> <INDENT> super(NextHopResult, self).__init__(**kwargs) <NEW_LINE> self.next_hop_type = next_hop_type <NEW_LINE> self.next_hop_ip_address = next_hop_ip_address <NEW_LINE> self.route_table_id = route_table_id
The information about next hop from the specified VM. :param next_hop_type: Next hop type. Possible values include: "Internet", "VirtualAppliance", "VirtualNetworkGateway", "VnetLocal", "HyperNetGateway", "None". :type next_hop_type: str or ~azure.mgmt.network.v2019_06_01.models.NextHopType :param next_hop_ip_address: Next hop IP Address. :type next_hop_ip_address: str :param route_table_id: The resource identifier for the route table associated with the route being returned. If the route being returned does not correspond to any user created routes then this field will be the string 'System Route'. :type route_table_id: str
6259907d4527f215b58eb6cd
@inherit_doc <NEW_LINE> class FeatureHasher(JavaTransformer, HasInputCols, HasOutputCol, HasNumFeatures, JavaMLReadable, JavaMLWritable): <NEW_LINE> <INDENT> categoricalCols = Param(Params._dummy(), "categoricalCols", "numeric columns to treat as categorical", typeConverter=TypeConverters.toListString) <NEW_LINE> @keyword_only <NEW_LINE> def __init__(self, numFeatures=1 << 18, inputCols=None, outputCol=None, categoricalCols=None): <NEW_LINE> <INDENT> super(FeatureHasher, self).__init__() <NEW_LINE> self._java_obj = self._new_java_obj("org.apache.spark.ml.feature.FeatureHasher", self.uid) <NEW_LINE> self._setDefault(numFeatures=1 << 18) <NEW_LINE> kwargs = self._input_kwargs <NEW_LINE> self.setParams(**kwargs) <NEW_LINE> <DEDENT> @keyword_only <NEW_LINE> @since("2.3.0") <NEW_LINE> def setParams(self, numFeatures=1 << 18, inputCols=None, outputCol=None, categoricalCols=None): <NEW_LINE> <INDENT> kwargs = self._input_kwargs <NEW_LINE> return self._set(**kwargs) <NEW_LINE> <DEDENT> @since("2.3.0") <NEW_LINE> def setCategoricalCols(self, value): <NEW_LINE> <INDENT> return self._set(categoricalCols=value) <NEW_LINE> <DEDENT> @since("2.3.0") <NEW_LINE> def getCategoricalCols(self): <NEW_LINE> <INDENT> return self.getOrDefault(self.categoricalCols)
.. note:: Experimental Feature hashing projects a set of categorical or numerical features into a feature vector of specified dimension (typically substantially smaller than that of the original feature space). This is done using the hashing trick (https://en.wikipedia.org/wiki/Feature_hashing) to map features to indices in the feature vector. The FeatureHasher transformer operates on multiple columns. Each column may contain either numeric or categorical features. Behavior and handling of column data types is as follows: * Numeric columns: For numeric features, the hash value of the column name is used to map the feature value to its index in the feature vector. By default, numeric features are not treated as categorical (even when they are integers). To treat them as categorical, specify the relevant columns in `categoricalCols`. * String columns: For categorical features, the hash value of the string "column_name=value" is used to map to the vector index, with an indicator value of `1.0`. Thus, categorical features are "one-hot" encoded (similarly to using :py:class:`OneHotEncoder` with `dropLast=false`). * Boolean columns: Boolean values are treated in the same way as string columns. That is, boolean features are represented as "column_name=true" or "column_name=false", with an indicator value of `1.0`. Null (missing) values are ignored (implicitly zero in the resulting feature vector). Since a simple modulo is used to transform the hash function to a vector index, it is advisable to use a power of two as the `numFeatures` parameter; otherwise the features will not be mapped evenly to the vector indices. >>> data = [(2.0, True, "1", "foo"), (3.0, False, "2", "bar")] >>> cols = ["real", "bool", "stringNum", "string"] >>> df = spark.createDataFrame(data, cols) >>> hasher = FeatureHasher(inputCols=cols, outputCol="features") >>> hasher.transform(df).head().features SparseVector(262144, {51871: 1.0, 63643: 1.0, 174475: 2.0, 253195: 1.0}) >>> hasher.setCategoricalCols(["real"]).transform(df).head().features SparseVector(262144, {51871: 1.0, 63643: 1.0, 171257: 1.0, 253195: 1.0}) >>> hasherPath = temp_path + "/hasher" >>> hasher.save(hasherPath) >>> loadedHasher = FeatureHasher.load(hasherPath) >>> loadedHasher.getNumFeatures() == hasher.getNumFeatures() True >>> loadedHasher.transform(df).head().features == hasher.transform(df).head().features True .. versionadded:: 2.3.0
6259907d67a9b606de5477d3
class Token(object): <NEW_LINE> <INDENT> def __init__(self, digit): <NEW_LINE> <INDENT> self.digit = digit <NEW_LINE> self.color = "black" <NEW_LINE> if self.digit == 2 or self.digit == 12: <NEW_LINE> <INDENT> self.pips = 1 <NEW_LINE> <DEDENT> elif self.digit == 3 or self.digit == 11: <NEW_LINE> <INDENT> self.pips = 2 <NEW_LINE> <DEDENT> elif self.digit == 4 or self.digit == 10: <NEW_LINE> <INDENT> self.pips = 3 <NEW_LINE> <DEDENT> elif self.digit == 5 or self.digit == 9: <NEW_LINE> <INDENT> self.pips = 4 <NEW_LINE> <DEDENT> elif self.digit == 6 or self.digit == 8: <NEW_LINE> <INDENT> self.pips = 5 <NEW_LINE> self.color = "red" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.pips = 0 <NEW_LINE> <DEDENT> <DEDENT> def get_dictionary(self): <NEW_LINE> <INDENT> if self.digit == 0: <NEW_LINE> <INDENT> return "None" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> token_dict = {} <NEW_LINE> token_dict["token_digit"] = self.digit <NEW_LINE> token_dict["token_pips"] = self.pips <NEW_LINE> token_dict["token_color"] = self.color <NEW_LINE> return token_dict
Class for defining a Token object. Current Attributes ------------------ digit: Int color: String pips: Int Methods ------- get_dictionary()
6259907dad47b63b2c5a92aa
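A short usage sketch of the Token class above, showing the pip and colour rules encoded in __init__ and the dictionary layout produced by get_dictionary():

six = Token(6)
print(six.get_dictionary())
# {'token_digit': 6, 'token_pips': 5, 'token_color': 'red'}
print(Token(0).get_dictionary())
# prints None (the method returns the string "None" for a digit of 0, i.e. a tile without a token)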
class TipoHiloCuerda(BaseConstModel): <NEW_LINE> <INDENT> __tablename__ = 'tipo_hilo_cuerda'
The different materials that may be used to make the cord.
6259907dbf627c535bcb2f2b
class TextSearch(BaseExpression): <NEW_LINE> <INDENT> def __init__(self, pattern, use_re=False, case=False): <NEW_LINE> <INDENT> self._pattern = unicode(pattern) <NEW_LINE> self.negated = 0 <NEW_LINE> self.use_re = use_re <NEW_LINE> self.case = case <NEW_LINE> self._build_re(self._pattern, use_re=use_re, case=case) <NEW_LINE> self.titlesearch = TitleSearch(self._pattern, use_re=use_re, case=case) <NEW_LINE> <DEDENT> def costs(self): <NEW_LINE> <INDENT> return 10000 <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> neg = self.negated and '-' or '' <NEW_LINE> return u'%s"%s"' % (neg, unicode(self._pattern)) <NEW_LINE> <DEDENT> def highlight_re(self): <NEW_LINE> <INDENT> return u"(%s)" % self._pattern <NEW_LINE> <DEDENT> def search(self, page): <NEW_LINE> <INDENT> matches = [] <NEW_LINE> results = self.titlesearch.search(page) <NEW_LINE> if results: <NEW_LINE> <INDENT> matches.extend(results) <NEW_LINE> <DEDENT> body = page.get_raw_body() <NEW_LINE> for match in self.search_re.finditer(body): <NEW_LINE> <INDENT> matches.append(TextMatch(re_match=match)) <NEW_LINE> <DEDENT> if ((self.negated and matches) or (not self.negated and not matches)): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> elif matches: <NEW_LINE> <INDENT> return matches <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return [Match()] <NEW_LINE> <DEDENT> <DEDENT> def lupy_term(self): <NEW_LINE> <INDENT> or_term = BooleanQuery() <NEW_LINE> term = self.titlesearch.lupy_term() <NEW_LINE> or_term.add(term, False, False) <NEW_LINE> pattern = self._pattern.lower() <NEW_LINE> if self.use_re: <NEW_LINE> <INDENT> if pattern[0] == '^': <NEW_LINE> <INDENT> pattern = pattern[1:] <NEW_LINE> <DEDENT> if pattern[:2] == '\b': <NEW_LINE> <INDENT> pattern = pattern[2:] <NEW_LINE> <DEDENT> term = RegularExpressionQuery(Term("text", pattern)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> terms = pattern.lower().split() <NEW_LINE> terms = [list(tokenizer(t)) for t in terms] <NEW_LINE> term = BooleanQuery() <NEW_LINE> for t in terms: <NEW_LINE> <INDENT> if len(t) == 1: <NEW_LINE> <INDENT> term.add(CamelCaseQuery(Term("text", t[0])), True, False) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> phrase = PhraseQuery() <NEW_LINE> for w in t: <NEW_LINE> <INDENT> phrase.add(Term("text", w)) <NEW_LINE> <DEDENT> term.add(phrase, True, False) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> or_term.add(term, False, False) <NEW_LINE> return or_term
A term that does a normal text search Both page content and the page title are searched, using an additional TitleSearch term.
6259907d167d2b6e312b82c1
class MeanBias(BinaryMetric): <NEW_LINE> <INDENT> def run(self, ref_dataset, target_dataset, absolute=False): <NEW_LINE> <INDENT> diff = ref_dataset.values - target_dataset.values <NEW_LINE> if absolute: <NEW_LINE> <INDENT> diff = abs(diff) <NEW_LINE> <DEDENT> mean_bias = diff.mean(axis=0) <NEW_LINE> return mean_bias
Calculate the mean bias
6259907d56b00c62f0fb432e
class ILESSCSSControlPanel(Interface): <NEW_LINE> <INDENT> enable_less_stylesheets = schema.Bool( title=_(u'label_enable_less_stylesheets', default=u'Enable client-side compiling LESS stylesheets'), description=_(u'help_enable_less_stylesheets', default=u"This setting will control the way LESS stylesheets are compiled for this site. " u"Client-side compiling is intended to use while in (theme) development mode. " u"Server-side compiled LESS resources are recommended in production mode. " u"By unsetting this option, this site will server-side compile them into CSS " u"resources and enable cache on them."), default=True )
Global LESS CSS settings. This describes records stored in the configuration registry and obtainable via plone.registry.
6259907d91f36d47f2231bbb
class figure_dialog(wx.Dialog): <NEW_LINE> <INDENT> def __init__(self, parent): <NEW_LINE> <INDENT> pre = wx.PreDialog() <NEW_LINE> self.PostCreate(pre) <NEW_LINE> res = xrc.XmlResource('figure_name_dialog.xrc') <NEW_LINE> res.LoadOnDialog(self, None, "main_dialog") <NEW_LINE> self.Bind(wx.EVT_BUTTON, self.on_ok, xrc.XRCCTRL(self, "ok_button")) <NEW_LINE> self.Bind(wx.EVT_BUTTON, self.on_cancel, xrc.XRCCTRL(self, "cancel_button")) <NEW_LINE> self.figure_name_ctrl = xrc.XRCCTRL(self, "figure_name_ctrl") <NEW_LINE> self.figure_number_ctrl = xrc.XRCCTRL(self, "figure_number_ctrl") <NEW_LINE> self.figure_number_ctrl.Bind(wx.EVT_TEXT_ENTER, self.on_enter) <NEW_LINE> <DEDENT> def on_ok(self, event): <NEW_LINE> <INDENT> self.EndModal(wx.ID_OK) <NEW_LINE> <DEDENT> def on_enter(self, event): <NEW_LINE> <INDENT> name = self.figure_name_ctrl.GetValue() <NEW_LINE> fig_num_str = self.figure_number_ctrl.GetValue() <NEW_LINE> try: <NEW_LINE> <INDENT> fig_num = int(fig_num_str) <NEW_LINE> valid_num = True <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> valid_num = False <NEW_LINE> <DEDENT> if valid_num and name: <NEW_LINE> <INDENT> self.EndModal(wx.ID_OK) <NEW_LINE> <DEDENT> <DEDENT> def on_cancel(self, event): <NEW_LINE> <INDENT> self.EndModal(wx.ID_CANCEL)
Dialog to set a group of plot descriptions as a :py:class:`figure` instance. The dialog prompts the user for a figure name and number. The number sets the hotkey on the figure menu for switching to that plot. Note that no attempt is made to check if the user is overwriting an existing figure on the menu. Note that this class uses wxPython xrc to create a dialog within an app that is created from a different wxPython xrc file. I am using the wxPython two stage creation approach (sort of, I guess). That is what the webpage I found this on said and that is what the pre and post stuff does.
6259907d5fcc89381b266e89
class Base(): <NEW_LINE> <INDENT> G_TYPE_CONSTANT = 'CONSTANT' <NEW_LINE> G_TYPE_VARIABLE = 'VARIABLE' <NEW_LINE> G_TYPE_OPERATION = 'OPERATION' <NEW_LINE> def __init__(self, name, value, type): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.value = value <NEW_LINE> self.type = type <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.expression() <NEW_LINE> <DEDENT> def __add__(self, op): <NEW_LINE> <INDENT> return add(self, op) <NEW_LINE> <DEDENT> def __sub__(self, op): <NEW_LINE> <INDENT> return substract(self, op) <NEW_LINE> <DEDENT> def __neg__(self): <NEW_LINE> <INDENT> return substract(Constant(0), self) <NEW_LINE> <DEDENT> def __mul__(self, op): <NEW_LINE> <INDENT> return multiply(self, op) <NEW_LINE> <DEDENT> def __truediv__(self, op): <NEW_LINE> <INDENT> return divide(self, op) <NEW_LINE> <DEDENT> def __pow__(self, op): <NEW_LINE> <INDENT> return pow(self, op) <NEW_LINE> <DEDENT> def partial_gradient(self, partial): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def expression(self): <NEW_LINE> <INDENT> pass
Base class for automatic differentiation.
6259907d283ffb24f3cf52fb
class ProfileCustom(ProfileM3A8): <NEW_LINE> <INDENT> arch = 'custom'
A Profile measure class for Custom.
6259907de1aae11d1e7cf53f
class ActionConflictError(Exception): <NEW_LINE> <INDENT> def __init__(self, message, conflicting_state, states, g, item1, item2): <NEW_LINE> <INDENT> Exception.__init__(self, message) <NEW_LINE> self.states = states <NEW_LINE> self.conflicting_state = conflicting_state <NEW_LINE> self.g = g <NEW_LINE> self.item1 = item1 <NEW_LINE> self.item2 = item2 <NEW_LINE> <DEDENT> def format_trace(self): <NEW_LINE> <INDENT> state = self.conflicting_state <NEW_LINE> res = [] <NEW_LINE> res.append('\n'.join([('>' if i in (self.item1, self.item2) else ' ') + _format_trace(self.g, item) for i, item in enumerate(state.itemlist)])) <NEW_LINE> while True: <NEW_LINE> <INDENT> parent_symbol = state.parent_symbol <NEW_LINE> next_id = state.parent_id <NEW_LINE> state = self.states[next_id] if next_id != None else None <NEW_LINE> if state is None: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> res.append('\n'.join([('>' if _next_token(self.g, item) == parent_symbol else ' ') + _format_trace(self.g, item) for i, item in enumerate(state.itemlist)])) <NEW_LINE> <DEDENT> return '\n\n'.join(reversed(res)) <NEW_LINE> <DEDENT> def print_trace(self, file=sys.stderr): <NEW_LINE> <INDENT> print >>file, self.format_trace() <NEW_LINE> <DEDENT> def counterexample(self): <NEW_LINE> <INDENT> trace = [] <NEW_LINE> st = self.conflicting_state <NEW_LINE> while st.parent_id: <NEW_LINE> <INDENT> trace.append(st.parent_symbol) <NEW_LINE> st = self.states[st.parent_id] <NEW_LINE> <DEDENT> trace.append(st.parent_symbol) <NEW_LINE> if hasattr(self.g, 'token_comments'): <NEW_LINE> <INDENT> trace = [self.g.token_comments.get(sym, sym) for sym in trace] <NEW_LINE> <DEDENT> return tuple(reversed(trace))
Raised during the construction of a parser if the grammar is not LR(k).
6259907d1b99ca4002290263
class Parameter_Stmt(StmtBase, CALLBase): <NEW_LINE> <INDENT> subclass_names = [] <NEW_LINE> use_names = ['Named_Constant_Def_List'] <NEW_LINE> def match(string): return CALLBase.match('PARAMETER', Named_Constant_Def_List, string, require_rhs=True) <NEW_LINE> match = staticmethod(match)
<parameter-stmt> = PARAMETER ( <named-constant-def-list> )
6259907dfff4ab517ebcf274
class InlineAdminForm(AdminForm): <NEW_LINE> <INDENT> def __init__(self, formset, form, fieldsets, prepopulated_fields, original): <NEW_LINE> <INDENT> self.formset = formset <NEW_LINE> self.original = original <NEW_LINE> if original is not None: <NEW_LINE> <INDENT> self.original_content_type_id = ContentType.objects.get_for_model(original).pk <NEW_LINE> <DEDENT> self.show_url = original and hasattr(original, 'get_absolute_url') <NEW_LINE> super(InlineAdminForm, self).__init__(form, fieldsets, prepopulated_fields) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> for name, options in self.fieldsets: <NEW_LINE> <INDENT> yield InlineFieldset(self.formset, self.form, name, **options) <NEW_LINE> <DEDENT> <DEDENT> def has_auto_field(self): <NEW_LINE> <INDENT> if self.form._meta.model._meta.has_auto_field: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> for parent in self.form._meta.model._meta.get_parent_list(): <NEW_LINE> <INDENT> if parent._meta.has_auto_field: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> def field_count(self): <NEW_LINE> <INDENT> num_of_fields = 0 <NEW_LINE> if self.has_auto_field(): <NEW_LINE> <INDENT> num_of_fields += 1 <NEW_LINE> <DEDENT> num_of_fields += len(self.fieldsets[0][1]["fields"]) <NEW_LINE> if self.formset.can_order: <NEW_LINE> <INDENT> num_of_fields += 1 <NEW_LINE> <DEDENT> if self.formset.can_delete: <NEW_LINE> <INDENT> num_of_fields += 1 <NEW_LINE> <DEDENT> return num_of_fields <NEW_LINE> <DEDENT> def pk_field(self): <NEW_LINE> <INDENT> return AdminField(self.form, self.formset._pk_field.name, False) <NEW_LINE> <DEDENT> def fk_field(self): <NEW_LINE> <INDENT> fk = getattr(self.formset, "fk", None) <NEW_LINE> if fk: <NEW_LINE> <INDENT> return AdminField(self.form, fk.name, False) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> <DEDENT> def deletion_field(self): <NEW_LINE> <INDENT> from django.forms.formsets import DELETION_FIELD_NAME <NEW_LINE> return AdminField(self.form, DELETION_FIELD_NAME, False) <NEW_LINE> <DEDENT> def ordering_field(self): <NEW_LINE> <INDENT> from django.forms.formsets import ORDERING_FIELD_NAME <NEW_LINE> return AdminField(self.form, ORDERING_FIELD_NAME, False)
A wrapper around an inline form for use in the admin system.
6259907ddc8b845886d55017
class Discarded(Event, AggregateRoot.Discarded): <NEW_LINE> <INDENT> @property <NEW_LINE> def user_id(self): <NEW_LINE> <INDENT> return self.__dict__['user_id']
Published when a list is discarded.
6259907df548e778e596cfee
class RubyXdg(RubyPackage): <NEW_LINE> <INDENT> homepage = "https://www.alchemists.io/projects/xdg/" <NEW_LINE> url = "https://rubygems.org/downloads/xdg-2.2.5.gem" <NEW_LINE> version('2.2.5', sha256='f3a5f799363852695e457bb7379ac6c4e3e8cb3a51ce6b449ab47fbb1523b913', expand=False)
Provides a Ruby implementation of the XDG Base Directory Specification.
6259907da8370b77170f1e2c
class CompileZip(compile_rule.CompileBase): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self.file_mapper = kwargs.pop('file_mapper', lambda f: f) <NEW_LINE> super(CompileZip, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def version(self): <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> def build(self, outfile_name, infile_names, _, context): <NEW_LINE> <INDENT> with zipfile.ZipFile(self.abspath(outfile_name), 'w') as z: <NEW_LINE> <INDENT> for f in infile_names: <NEW_LINE> <INDENT> zipname = self.file_mapper(f) <NEW_LINE> z.write(self.abspath(f), zipname, zipfile.ZIP_STORED)
Compile all the input files into a zipfile.
6259907d5fcc89381b266e8a
class PagingSchema(Schema): <NEW_LINE> <INDENT> page = fields.Int() <NEW_LINE> pages = fields.Int() <NEW_LINE> per_page = fields.Int() <NEW_LINE> total = fields.Int() <NEW_LINE> @post_dump(pass_many=False) <NEW_LINE> def move_to_meta(self, data): <NEW_LINE> <INDENT> items = data.pop('items') <NEW_LINE> return {'meta': data, 'items': items}
Base class for paging schema.
6259907d4f6381625f19a1dc
class UnifispotModule(Blueprint): <NEW_LINE> <INDENT> def __init__(self, name,mtype,*args, **kwargs): <NEW_LINE> <INDENT> name = "unifispot.modules." + name <NEW_LINE> self.mtype = mtype <NEW_LINE> super(UnifispotModule, self).__init__(name, *args, **kwargs)
Override the blueprint namespace to unifispot.modules.<name>
6259907d7d847024c075de3a
class ProgressBar: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.count = 0 <NEW_LINE> <DEDENT> def update(self, tmp, block_size, total_size): <NEW_LINE> <INDENT> self.count += block_size <NEW_LINE> percent = f"{int(100 * self.count / total_size)}" <NEW_LINE> filled_length = int(100 * self.count // total_size) <NEW_LINE> pbar = "#" * filled_length + '-' * (100 - filled_length) <NEW_LINE> print("\r|%s| %s%%" % (pbar, percent), end="\r") <NEW_LINE> if self.count == total_size: <NEW_LINE> <INDENT> print() <NEW_LINE> <DEDENT> <DEDENT> def end(self, total_size): <NEW_LINE> <INDENT> self.count = total_size <NEW_LINE> self.update(None, 0, total_size)
Basic Progress Bar. Inspired from: https://stackoverflow.com/questions/3173320/text-progress-bar-in-the-console
6259907da8370b77170f1e2d
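A brief usage sketch: the update() signature above matches the reporthook callback expected by urllib.request.urlretrieve, so the bar can be fed directly from a download. The URL and filename here are placeholders, not part of the original record.

import urllib.request

pbar = ProgressBar()
# reporthook is called as (block_number, block_size, total_size); update() ignores
# the first argument and accumulates block_size, which is exactly how it is written.
urllib.request.urlretrieve("https://example.com/archive.zip",
                           "archive.zip", reporthook=pbar.update)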
class Cgmvolf(Cgmvapich2, OpenBLAS, ScaLAPACK, Fftw): <NEW_LINE> <INDENT> NAME = 'cgmvolf'
Compiler toolchain with Clang, GFortran, MVAPICH2, OpenBLAS, ScaLAPACK and FFTW.
6259907d8a349b6b43687cba
@ddt.ddt <NEW_LINE> class AccessibilityPageTest(AcceptanceTest): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(AccessibilityPageTest, self).setUp() <NEW_LINE> self.accessibility_page = AccessibilityPage(self.browser) <NEW_LINE> <DEDENT> def test_page_loads(self): <NEW_LINE> <INDENT> self.accessibility_page.visit() <NEW_LINE> self.assertTrue(self.accessibility_page.header_text_on_page()) <NEW_LINE> <DEDENT> def test_successful_submit(self): <NEW_LINE> <INDENT> self.accessibility_page.visit() <NEW_LINE> self.accessibility_page.fill_form(email='[email protected]', name='Bok-choy', message='I\'m testing you.') <NEW_LINE> self.accessibility_page.submit_form() <NEW_LINE> <DEDENT> @ddt.data( ('email', 'Enter a valid email address', '', 'Bok-choy', 'I\'m testing you.'), ('fullName', 'Enter a name', '[email protected]', '', 'I\'m testing you.'), ('message', 'Enter a message', '[email protected]', 'Bok-choy', ''), ) <NEW_LINE> @ddt.unpack <NEW_LINE> def test_error_submit(self, field_missing, error_message_text, email, name, message): <NEW_LINE> <INDENT> self.accessibility_page.visit() <NEW_LINE> self.accessibility_page.fill_form(email=email, name=name, message=message) <NEW_LINE> self.accessibility_page.error_message_is_shown_with_text(field_missing, text=error_message_text) <NEW_LINE> self.accessibility_page.submit_form() <NEW_LINE> self.accessibility_page.alert_has_text(error_message_text) <NEW_LINE> <DEDENT> def test_error_messages(self): <NEW_LINE> <INDENT> self.accessibility_page.visit() <NEW_LINE> self.check_error_message('email', 'Enter a valid email address') <NEW_LINE> self.check_error_message('fullName', 'Enter a name') <NEW_LINE> self.check_error_message('message', 'Enter a message', field_type='textarea') <NEW_LINE> <DEDENT> def check_error_message(self, field_id, error_message_text, field_type='input'): <NEW_LINE> <INDENT> self.accessibility_page.leave_field_blank(field_id, field_type=field_type) <NEW_LINE> self.accessibility_page.error_message_is_shown_with_text(field_id, text=error_message_text)
Test that a user can access the page and submit studio accessibility feedback.
6259907d7cff6e4e811b749d
class tsRazaoSocial(pyxb.binding.datatypes.string): <NEW_LINE> <INDENT> _ExpandedName = pyxb.namespace.ExpandedName(Namespace, "tsRazaoSocial") <NEW_LINE> _XSDLocation = pyxb.utils.utility.Location( "/Users/Marcelo/Dev/Projetos/PyNFSe/PyNFSe/XSD/Curitiba/Template/nfse.xsd", 124, 1 ) <NEW_LINE> _Documentation = None
An atomic simple type.
6259907de1aae11d1e7cf540
class Disk_on_sphere(Function2D, metaclass=FunctionMeta): <NEW_LINE> <INDENT> def _set_units(self, x_unit, y_unit, z_unit): <NEW_LINE> <INDENT> self.lon0.unit = x_unit <NEW_LINE> self.lat0.unit = y_unit <NEW_LINE> self.radius.unit = x_unit <NEW_LINE> <DEDENT> def evaluate(self, x, y, lon0, lat0, radius): <NEW_LINE> <INDENT> lon, lat = x,y <NEW_LINE> angsep = angular_distance(lon0, lat0, lon, lat) <NEW_LINE> return np.power(old_div(180, np.pi), 2) * 1. / (np.pi * radius ** 2) * (angsep <= radius) <NEW_LINE> <DEDENT> def get_boundaries(self): <NEW_LINE> <INDENT> max_radius = self.radius.max_value <NEW_LINE> min_lat = max(-90., self.lat0.value - 2 * max_radius) <NEW_LINE> max_lat = min(90., self.lat0.value + 2 * max_radius) <NEW_LINE> max_abs_lat = max(np.absolute(min_lat), np.absolute(max_lat)) <NEW_LINE> if max_abs_lat > 89. or 2 * max_radius / np.cos(max_abs_lat * np.pi / 180.) >= 180.: <NEW_LINE> <INDENT> min_lon = 0. <NEW_LINE> max_lon = 360. <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> min_lon = self.lon0.value - 2 * max_radius / np.cos(max_abs_lat * np.pi / 180.) <NEW_LINE> max_lon = self.lon0.value + 2 * max_radius / np.cos(max_abs_lat * np.pi / 180.) <NEW_LINE> if min_lon < 0.: <NEW_LINE> <INDENT> min_lon = min_lon + 360. <NEW_LINE> <DEDENT> elif max_lon > 360.: <NEW_LINE> <INDENT> max_lon = max_lon - 360. <NEW_LINE> <DEDENT> <DEDENT> return (min_lon, max_lon), (min_lat, max_lat) <NEW_LINE> <DEDENT> def get_total_spatial_integral(self, z=None): <NEW_LINE> <INDENT> if isinstance( z, u.Quantity): <NEW_LINE> <INDENT> z = z.value <NEW_LINE> <DEDENT> return np.ones_like( z )
description : A bidimensional disk/tophat function on a sphere (in spherical coordinates) latex : $$ f(\vec{x}) = \left(\frac{180}{\pi}\right)^2 \frac{1}{\pi~({\rm radius})^2} ~\left\{\begin{matrix} 1 & {\rm if}& {\rm | \vec{x} - \vec{x}_0| \le {\rm radius}} \\ 0 & {\rm if}& {\rm | \vec{x} - \vec{x}_0| > {\rm radius}} \end{matrix}\right. $$ parameters : lon0 : desc : Longitude of the center of the source initial value : 0.0 min : 0.0 max : 360.0 lat0 : desc : Latitude of the center of the source initial value : 0.0 min : -90.0 max : 90.0 radius : desc : Radius of the disk initial value : 15 min : 0 max : 20
6259907d66673b3332c31e5c
class Fitness(object): <NEW_LINE> <INDENT> def __init__(self, value=0, category=MAX): <NEW_LINE> <INDENT> self.value = value <NEW_LINE> self.category = category <NEW_LINE> <DEDENT> def deep_copy(self): <NEW_LINE> <INDENT> return copy.deepcopy(self) <NEW_LINE> <DEDENT> def __lt__(self, fitness): <NEW_LINE> <INDENT> if self.category == MAX: <NEW_LINE> <INDENT> return self.value < fitness.value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.value > fitness.value <NEW_LINE> <DEDENT> <DEDENT> def __le__(self, fitness): <NEW_LINE> <INDENT> if self.__eq__(fitness): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if self.category == MAX: <NEW_LINE> <INDENT> return self.value < fitness.value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.value > fitness.value <NEW_LINE> <DEDENT> <DEDENT> def __eq__(self, fitness): <NEW_LINE> <INDENT> return real_eq(self.value, fitness.value) <NEW_LINE> <DEDENT> def __ne__(self, fitness): <NEW_LINE> <INDENT> return not self.__eq__(fitness) <NEW_LINE> <DEDENT> def __gt__(self, fitness): <NEW_LINE> <INDENT> if self.category == MAX: <NEW_LINE> <INDENT> return self.value > fitness.value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.value < fitness.value <NEW_LINE> <DEDENT> <DEDENT> def __ge__(self, fitness): <NEW_LINE> <INDENT> if self.__eq__(fitness): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if self.category == MAX: <NEW_LINE> <INDENT> return self.value > fitness.value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.value < fitness.value <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> if self.category == MIN: <NEW_LINE> <INDENT> category_name = MIN_NAME <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> category_name = MAX_NAME <NEW_LINE> <DEDENT> string = '' <NEW_LINE> string += '{ ' <NEW_LINE> string += '\"category\": %s, ' % category_name <NEW_LINE> string += '\"value\": %s' % str(self.value) <NEW_LINE> string += ' }' <NEW_LINE> return string
Fitness categories (types): MIN == 1 -> smaller fitness is better; MAX == 0 -> greater fitness is better
6259907d5166f23b2e244e36
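A brief usage sketch of the comparison semantics above, assuming the module-level MIN, MAX and real_eq names that the class refers to:

best = Fitness(value=3.5, category=MAX)
other = Fitness(value=2.0, category=MAX)
assert other < best        # MAX category: a larger value ranks higher

cost_a = Fitness(value=3.5, category=MIN)
cost_b = Fitness(value=2.0, category=MIN)
assert cost_a < cost_b     # MIN category: a smaller value ranks higher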
class ListCommand(lister.Lister, command.Command): <NEW_LINE> <INDENT> columns = ('uuid', 'label', 'status') <NEW_LINE> def get_parser(self, prog_name): <NEW_LINE> <INDENT> parser = super(ListCommand, self).get_parser(prog_name) <NEW_LINE> return parser <NEW_LINE> <DEDENT> def take_action(self, parsed_args): <NEW_LINE> <INDENT> data = bs_image.get_all() <NEW_LINE> data = data_utils.get_display_data_multi(self.columns, data) <NEW_LINE> return (self.columns, data)
List all available bootstrap images.
6259907d97e22403b383c95e
class PostCategory(Base) : <NEW_LINE> <INDENT> __tablename__ = 'post_categories' <NEW_LINE> id = Column(Integer, primary_key=True) <NEW_LINE> name = Column(String(64), index=True, unique=True) <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return '<Category %r>' % self.name
Create the Categories table for Posts.
6259907d5fdd1c0f98e5f9dd
class UserSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = get_user_model() <NEW_LINE> fields = ('email', 'password', 'name') <NEW_LINE> extra_kwargs = {'password': {'write_only': True, 'min_length': 5}} <NEW_LINE> <DEDENT> def create(self, validated_data): <NEW_LINE> <INDENT> return get_user_model().objects.create_user(**validated_data) <NEW_LINE> <DEDENT> def update(self, instance, validated_data): <NEW_LINE> <INDENT> password = validated_data.pop('password', None) <NEW_LINE> user = super().update(instance, validated_data) <NEW_LINE> if password: <NEW_LINE> <INDENT> user.set_password(password) <NEW_LINE> user.save() <NEW_LINE> <DEDENT> return user
Serializer for the user object.
6259907d67a9b606de5477d5
@mock.patch('os.chdir') <NEW_LINE> class ChdirTests(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.orig_cwd = os.getcwd() <NEW_LINE> self.dst_dir = 'test' <NEW_LINE> <DEDENT> def test_os_chdir_is_called_with_dst_dir_in_entry(self, mock_chdir): <NEW_LINE> <INDENT> with chdir(self.dst_dir): <NEW_LINE> <INDENT> mock_chdir.assert_called_once_with(self.dst_dir) <NEW_LINE> <DEDENT> <DEDENT> def test_os_chdir_is_called_with_orig_cwd_in_exit(self, mock_chdir): <NEW_LINE> <INDENT> with chdir(self.dst_dir): <NEW_LINE> <INDENT> mock_chdir.reset_mock() <NEW_LINE> <DEDENT> mock_chdir.assert_called_once_with(self.orig_cwd) <NEW_LINE> <DEDENT> def test_os_chdir_is_called_with_orig_cwd_in_exit_even_if_exception_occurs( self, mock_chdir): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> with chdir(self.dst_dir): <NEW_LINE> <INDENT> mock_chdir.reset_mock() <NEW_LINE> raise RuntimeError <NEW_LINE> <DEDENT> <DEDENT> except RuntimeError: <NEW_LINE> <INDENT> mock_chdir.assert_called_once_with(self.orig_cwd)
Tests for the chdir() context manager.
6259907d7047854f46340e14
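The tests above target a chdir() context manager that is not part of this record. A minimal sketch of an implementation that would satisfy them, restoring the original working directory even when an exception is raised:

import os
from contextlib import contextmanager

@contextmanager
def chdir(dst_dir):
    # Switch to dst_dir on entry and always return to the original directory on exit.
    orig_cwd = os.getcwd()
    os.chdir(dst_dir)
    try:
        yield
    finally:
        os.chdir(orig_cwd)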
class UserCart(object): <NEW_LINE> <INDENT> def __init__(self, user_id=None): <NEW_LINE> <INDENT> self.swagger_types = { 'user_id': 'int' } <NEW_LINE> self.attribute_map = { 'user_id': 'userId' } <NEW_LINE> self._user_id = user_id <NEW_LINE> <DEDENT> @property <NEW_LINE> def user_id(self): <NEW_LINE> <INDENT> return self._user_id <NEW_LINE> <DEDENT> @user_id.setter <NEW_LINE> def user_id(self, user_id): <NEW_LINE> <INDENT> self._user_id = user_id <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, UserCart): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
6259907d63b5f9789fe86bc6
class ExamRoom(models.Model): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = _('Klausurraum') <NEW_LINE> verbose_name_plural = _('Klausurräume') <NEW_LINE> ordering = ['available', '-capacity_1_free', '-capacity_2_free', 'room'] <NEW_LINE> <DEDENT> room = models.OneToOneField('ophasebase.Room', models.CASCADE, verbose_name=_('Raum'), limit_choices_to={"type": "HS"}) <NEW_LINE> available = models.BooleanField(verbose_name=_('Verfügbar'), default=True) <NEW_LINE> capacity_1_free = models.IntegerField(verbose_name=_('Plätze (1 Platz Abstand)')) <NEW_LINE> capacity_2_free = models.IntegerField(verbose_name=_('Plätze (2 Plätze Abstand)')) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return str(self.room) <NEW_LINE> <DEDENT> def capacity(self, spacing): <NEW_LINE> <INDENT> return self.capacity_1_free if spacing == 1 else self.capacity_2_free <NEW_LINE> <DEDENT> def seats(self, spacing, ratio): <NEW_LINE> <INDENT> return math.ceil(self.capacity(spacing) * ratio)
A room which is suitable for the exam.
6259907d283ffb24f3cf52ff
class TestPyfakefsUnittest(fake_filesystem_unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.setUpPyfakefs() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> self.tearDownPyfakefs() <NEW_LINE> <DEDENT> def test_file(self): <NEW_LINE> <INDENT> self.assertFalse(os.path.exists('/fake_file.txt')) <NEW_LINE> with file('/fake_file.txt', 'w') as f: <NEW_LINE> <INDENT> f.write("This test file was created using the file() function.\n") <NEW_LINE> <DEDENT> self.assertTrue(self.fs.Exists('/fake_file.txt')) <NEW_LINE> with file('/fake_file.txt') as f: <NEW_LINE> <INDENT> content = f.read() <NEW_LINE> <DEDENT> self.assertEqual(content, 'This test file was created using the file() function.\n') <NEW_LINE> <DEDENT> def test_open(self): <NEW_LINE> <INDENT> self.assertFalse(os.path.exists('/fake_file.txt')) <NEW_LINE> with open('/fake_file.txt', 'w') as f: <NEW_LINE> <INDENT> f.write("This test file was created using the open() function.\n") <NEW_LINE> <DEDENT> self.assertTrue(self.fs.Exists('/fake_file.txt')) <NEW_LINE> with open('/fake_file.txt') as f: <NEW_LINE> <INDENT> content = f.read() <NEW_LINE> <DEDENT> self.assertEqual(content, 'This test file was created using the open() function.\n') <NEW_LINE> <DEDENT> def test_os(self): <NEW_LINE> <INDENT> self.assertFalse(self.fs.Exists('/test/dir1/dir2')) <NEW_LINE> os.makedirs('/test/dir1/dir2') <NEW_LINE> self.assertTrue(self.fs.Exists('/test/dir1/dir2')) <NEW_LINE> <DEDENT> def test_glob(self): <NEW_LINE> <INDENT> self.assertItemsEqual(glob.glob('/test/dir1/dir*'), []) <NEW_LINE> self.fs.CreateDirectory('/test/dir1/dir2a') <NEW_LINE> self.assertItemsEqual(glob.glob('/test/dir1/dir*'), ['/test/dir1/dir2a']) <NEW_LINE> self.fs.CreateDirectory('/test/dir1/dir2b') <NEW_LINE> self.assertItemsEqual(glob.glob('/test/dir1/dir*'), ['/test/dir1/dir2a', '/test/dir1/dir2b']) <NEW_LINE> <DEDENT> def test_shutil(self): <NEW_LINE> <INDENT> self.fs.CreateDirectory('/test/dir1/dir2a') <NEW_LINE> self.fs.CreateDirectory('/test/dir1/dir2b') <NEW_LINE> self.assertTrue(self.fs.Exists('/test/dir1/dir2b')) <NEW_LINE> self.assertTrue(self.fs.Exists('/test/dir1/dir2a')) <NEW_LINE> shutil.rmtree('/test/dir1') <NEW_LINE> self.assertFalse(self.fs.Exists('/test/dir1')) <NEW_LINE> <DEDENT> def test_tempfile(self): <NEW_LINE> <INDENT> with tempfile.NamedTemporaryFile() as tf: <NEW_LINE> <INDENT> tf.write('Temporary file contents\n') <NEW_LINE> name = tf.name <NEW_LINE> self.assertTrue(self.fs.Exists(tf.name))
Test the `pyfakefs.fake_filesystem_unittest.TestCase` base class.
6259907de1aae11d1e7cf541
class Setle(X86InstructionBase): <NEW_LINE> <INDENT> def __init__(self, prefix, mnemonic, operands, architecture_mode): <NEW_LINE> <INDENT> super(Setle, self).__init__(prefix, mnemonic, operands, architecture_mode) <NEW_LINE> <DEDENT> @property <NEW_LINE> def source_operands(self): <NEW_LINE> <INDENT> return [ ] <NEW_LINE> <DEDENT> @property <NEW_LINE> def destination_operands(self): <NEW_LINE> <INDENT> return [ ]
Representation of Setle x86 instruction.
6259907d3346ee7daa338391
class TokenAndKeyAuthentication(TokenAuthentication): <NEW_LINE> <INDENT> model = AuthToken <NEW_LINE> def authenticate(self, request): <NEW_LINE> <INDENT> apikey = request.META.get('X_API_KEY', '') <NEW_LINE> if apikey and not ApiKey.objects.filter(key=apikey, active=True).exists(): <NEW_LINE> <INDENT> raise exceptions.AuthenticationFailed('Invalid API key') <NEW_LINE> <DEDENT> auth = super(TokenAndKeyAuthentication, self).authenticate(request) <NEW_LINE> if auth is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if not auth[0].is_verified: <NEW_LINE> <INDENT> raise exceptions.AuthenticationFailed('Email address is not yet verified!') <NEW_LINE> <DEDENT> if not auth[0].is_active: <NEW_LINE> <INDENT> raise exceptions.AuthenticationFailed('User account is inactive!') <NEW_LINE> <DEDENT> return auth
A custom token authentication backend that also checks an API key. Additionally, the user account must be active and verified.
6259907dbe7bc26dc9252b85
class Cell(object): <NEW_LINE> <INDENT> def __init__(self, row, col): <NEW_LINE> <INDENT> self.row = row <NEW_LINE> self.col = col <NEW_LINE> self.visited = False <NEW_LINE> self.active = False <NEW_LINE> self.is_entry_exit = None <NEW_LINE> self.walls = {"top": True, "right": True, "bottom": True, "left": True} <NEW_LINE> self.neighbours = list() <NEW_LINE> <DEDENT> def is_walls_between(self, neighbour): <NEW_LINE> <INDENT> if self.row - neighbour.row == 1 and self.walls["top"] and neighbour.walls["bottom"]: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif self.row - neighbour.row == -1 and self.walls["bottom"] and neighbour.walls["top"]: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif self.col - neighbour.col == 1 and self.walls["left"] and neighbour.walls["right"]: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif self.col - neighbour.col == -1 and self.walls["right"] and neighbour.walls["left"]: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def remove_walls(self, neighbour_row, neighbour_col): <NEW_LINE> <INDENT> if self.row - neighbour_row == 1: <NEW_LINE> <INDENT> self.walls["top"] = False <NEW_LINE> return True, "" <NEW_LINE> <DEDENT> elif self.row - neighbour_row == -1: <NEW_LINE> <INDENT> self.walls["bottom"] = False <NEW_LINE> return True, "" <NEW_LINE> <DEDENT> elif self.col - neighbour_col == 1: <NEW_LINE> <INDENT> self.walls["left"] = False <NEW_LINE> return True, "" <NEW_LINE> <DEDENT> elif self.col - neighbour_col == -1: <NEW_LINE> <INDENT> self.walls["right"] = False <NEW_LINE> return True, "" <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def set_as_entry_exit(self, entry_exit, row_limit, col_limit): <NEW_LINE> <INDENT> if self.row == 0: <NEW_LINE> <INDENT> self.walls["top"] = False <NEW_LINE> <DEDENT> elif self.row == row_limit: <NEW_LINE> <INDENT> self.walls["bottom"] = False <NEW_LINE> <DEDENT> elif self.col == 0: <NEW_LINE> <INDENT> self.walls["left"] = False <NEW_LINE> <DEDENT> elif self.col == col_limit: <NEW_LINE> <INDENT> self.walls["right"] = False <NEW_LINE> <DEDENT> self.is_entry_exit = entry_exit
Class for representing a cell in a 2D grid. Attributes: row (int): The row that this cell belongs to col (int): The column that this cell belongs to visited (bool): True if this cell has been visited by an algorithm active (bool): is_entry_exit (bool): True when the cell is the beginning or end of the maze walls (dict): True/False flags for the "top", "right", "bottom" and "left" walls neighbours (list): Neighbouring cells
6259907d3617ad0b5ee07bad
@enum.unique <NEW_LINE> class WinSpecTimingMode(enum.IntEnum): <NEW_LINE> <INDENT> free_run = 1 <NEW_LINE> external_sync = 3
Enum for specification of the WinSpec timing mode. Attributes free_run (int): Alias for free run mode. Corresponds to value 1. external_sync (int): Alias for external trigger mode. Corresponds to value 3.
6259907d55399d3f05627f74
class TIMEOUT(Error): <NEW_LINE> <INDENT> pass
Timeout condition.
6259907d44b2445a339b768d
class TrialFramesDir(FileFinder): <NEW_LINE> <INDENT> glob_pattern = '*frames*' <NEW_LINE> @classmethod <NEW_LINE> def generate_name(self, dirname): <NEW_LINE> <INDENT> return os.path.join(dirname, 'frames')
Finds directory containing frames at time of retraction
6259907df548e778e596cff1
@logger.init('gui', 'DEBUG') <NEW_LINE> class Folder(widgets.Push): <NEW_LINE> <INDENT> def __init__(self, title, widget, parent=None): <NEW_LINE> <INDENT> self.title = title <NEW_LINE> self.widget = widget <NEW_LINE> super(Folder, self).__init__(icon=qt.IMAGES['folder_icon'], connect=self.__connect, parent=parent) <NEW_LINE> self.setStyleSheet("") <NEW_LINE> <DEDENT> @property <NEW_LINE> def qt_height(self): <NEW_LINE> <INDENT> return self.widget.frameSize().height() <NEW_LINE> <DEDENT> @property <NEW_LINE> def qt_size(self): <NEW_LINE> <INDENT> return QtCore.QSize(self.qt_height, self.qt_height) <NEW_LINE> <DEDENT> def resize(self): <NEW_LINE> <INDENT> self.setFixedSize(self.qt_size) <NEW_LINE> self.setIconSize(self.qt_size) <NEW_LINE> <DEDENT> def __connect(self): <NEW_LINE> <INDENT> path = self.widget.text() <NEW_LINE> text = qtio.getopendir(self, self.title, path) <NEW_LINE> self.widget.store_from_value(text)
Subclass that resizes to a fixed size with a bound folder icon
6259907d5fdd1c0f98e5f9df
class RelatedCollectionsModule(Model): <NEW_LINE> <INDENT> _validation = { 'value': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'value': {'key': 'value', 'type': '[ImageGallery]'}, } <NEW_LINE> def __init__(self, **kwargs) -> None: <NEW_LINE> <INDENT> super(RelatedCollectionsModule, self).__init__(**kwargs) <NEW_LINE> self.value = None
Defines a list of webpages that contain related images. Variables are only populated by the server, and will be ignored when sending a request. :ivar value: A list of webpages that contain related images. :vartype value: list[~azure.cognitiveservices.search.imagesearch.models.ImageGallery]
6259907d283ffb24f3cf5300
class AzureFirewallRCAction(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(AzureFirewallRCAction, self).__init__(**kwargs) <NEW_LINE> self.type = kwargs.get('type', None)
Properties of the AzureFirewallRCAction. :param type: The type of action. Possible values include: "Allow", "Deny". :type type: str or ~azure.mgmt.network.v2018_10_01.models.AzureFirewallRCActionType
6259907d656771135c48ad60
class Gradient_Descent(BaseAlgorithm): <NEW_LINE> <INDENT> requires = 'real' <NEW_LINE> def __init__(self, space, learning_rate=1., dx_tolerance=1e-7): <NEW_LINE> <INDENT> super(Gradient_Descent, self).__init__(space, learning_rate=learning_rate, dx_tolerance=dx_tolerance) <NEW_LINE> self.has_observed_once = False <NEW_LINE> self.current_point = None <NEW_LINE> self.gradient = numpy.array([numpy.inf]) <NEW_LINE> <DEDENT> def suggest(self, num=1): <NEW_LINE> <INDENT> assert num == 1 <NEW_LINE> if not self.has_observed_once: <NEW_LINE> <INDENT> return self.space.sample(1) <NEW_LINE> <DEDENT> self.current_point -= self.learning_rate * self.gradient <NEW_LINE> return [self.current_point] <NEW_LINE> <DEDENT> def observe(self, points, results): <NEW_LINE> <INDENT> self.current_point = numpy.asarray(points[-1]) <NEW_LINE> self.gradient = numpy.asarray(results[-1]['gradient']) <NEW_LINE> self.has_observed_once = True <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_done(self): <NEW_LINE> <INDENT> dx = self.learning_rate * numpy.sqrt(self.gradient.dot(self.gradient)) <NEW_LINE> return dx <= self.dx_tolerance
Implement a gradient descent algorithm.
6259907d4f88993c371f1252
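A hedged sketch of the suggest/observe loop implied by the class above. The _QuadraticSpace stand-in and the framework's BaseAlgorithm (assumed to store learning_rate and dx_tolerance) are assumptions, not part of this record.

import numpy

class _QuadraticSpace:
    # Hypothetical stand-in for the search-space object: only sample() is needed here.
    def sample(self, num):
        return [numpy.array([5.0]) for _ in range(num)]

algo = Gradient_Descent(_QuadraticSpace(), learning_rate=0.1, dx_tolerance=1e-6)
point = algo.suggest(1)[0]                  # first call draws a random sample
while not algo.is_done:
    # Minimise f(x) = x**2, whose gradient is 2*x.
    algo.observe([point], [{'objective': float(point[0] ** 2),
                            'gradient': [2.0 * float(point[0])]}])
    point = algo.suggest(1)[0]              # subsequent calls follow the gradient step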
class fds_postfinance_account_sepa(models.Model): <NEW_LINE> <INDENT> _inherit = 'fds.postfinance.account' <NEW_LINE> sepa_upload_ids = fields.One2many( comodel_name='fds.sepa.upload.history', inverse_name='fds_account_id', readonly=True, )
Add SEPA upload history to the model fds.postfinance.account
6259907d4f6381625f19a1de
class CalendarFilter(Filter): <NEW_LINE> <INDENT> content_type = "text/calendar" <NEW_LINE> def __init__(self, default_timezone): <NEW_LINE> <INDENT> self.tzify = lambda dt: as_tz_aware_ts(dt, default_timezone) <NEW_LINE> self.children = [] <NEW_LINE> <DEDENT> def filter_subcomponent(self, name, is_not_defined=False, time_range=None): <NEW_LINE> <INDENT> ret = ComponentFilter( name=name, is_not_defined=is_not_defined, time_range=time_range ) <NEW_LINE> self.children.append(ret) <NEW_LINE> return ret <NEW_LINE> <DEDENT> def check(self, name, file): <NEW_LINE> <INDENT> c = file.calendar <NEW_LINE> if c is None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> for child_filter in self.children: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if not child_filter.match(file.calendar, self.tzify): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> except MissingProperty as e: <NEW_LINE> <INDENT> logging.warning( "calendar_query: Ignoring calendar object %s, due " "to missing property %s", name, e.property_name, ) <NEW_LINE> return False <NEW_LINE> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> def check_from_indexes(self, name, indexes): <NEW_LINE> <INDENT> for child_filter in self.children: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if not child_filter.match_indexes(indexes, self.tzify): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> except MissingProperty as e: <NEW_LINE> <INDENT> logging.warning( "calendar_query: Ignoring calendar object %s, due " "to missing property %s", name, e.property_name, ) <NEW_LINE> return False <NEW_LINE> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> def index_keys(self): <NEW_LINE> <INDENT> subindexes = [] <NEW_LINE> for child in self.children: <NEW_LINE> <INDENT> subindexes.extend(child.index_keys()) <NEW_LINE> <DEDENT> return subindexes <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "%s(%r)" % (self.__class__.__name__, self.children)
A filter that works on ICalendar files.
6259907d5fcc89381b266e8c
class ParserError(Exception): <NEW_LINE> <INDENT> def __init__(self, file, error): <NEW_LINE> <INDENT> self.file = file <NEW_LINE> self.error = error <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "Parsing failed for {file} due to {error}".format( file=self.file, error=self.error)
Exception thrown when parsing does not complete
6259907d3d592f4c4edbc88f
class DomainTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def test_ListDomains_RegisterDomain_DescribeDomain_DeprecateDomain(self): <NEW_LINE> <INDENT> swf = util.BasicSwfSetup(self) <NEW_LINE> new_domain_name = util.create_new_name() <NEW_LINE> domain_pager = swf.client.get_paginator('list_domains') <NEW_LINE> known_domains = [] <NEW_LINE> for page in domain_pager.paginate( registrationStatus='REGISTERED', reverseOrder=False): <NEW_LINE> <INDENT> if 'domainInfos' in page: <NEW_LINE> <INDENT> for domainInfo in page['domainInfos']: <NEW_LINE> <INDENT> known_domains.append(domainInfo['name']) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> swf.client.register_domain( name=new_domain_name, description='domain for ' + new_domain_name, workflowExecutionRetentionPeriodInDays='1' ) <NEW_LINE> domain_pager = swf.client.get_paginator('list_domains') <NEW_LINE> found = False <NEW_LINE> for page in domain_pager.paginate( registrationStatus='REGISTERED', maximumPageSize=10, reverseOrder=False): <NEW_LINE> <INDENT> if 'domainInfos' in page: <NEW_LINE> <INDENT> for domainInfo in page['domainInfos']: <NEW_LINE> <INDENT> self.assertTrue( domainInfo['name'] in known_domains or domainInfo['name'] == new_domain_name, 'Found unexpected domain {0}'.format(domainInfo['name'])) <NEW_LINE> found = found or domainInfo['name'] == new_domain_name <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.assertTrue(found, 'Did not find new domain') <NEW_LINE> domain = swf.client.describe_domain(name=new_domain_name) <NEW_LINE> self.assertEqual(new_domain_name, domain['domainInfo']['name'], 'incorrect name') <NEW_LINE> self.assertEqual('REGISTERED', domain['domainInfo']['status'], 'incorrect status') <NEW_LINE> self.assertEqual('domain for ' + new_domain_name, domain['domainInfo']['description'], 'incorrect description') <NEW_LINE> <DEDENT> def test_Page_ListDomains(self): <NEW_LINE> <INDENT> swf = util.BasicSwfSetup(self) <NEW_LINE> created_domain_names = [] <NEW_LINE> for i in range(0, 100): <NEW_LINE> <INDENT> new_domain_name = util.create_new_name() <NEW_LINE> created_domain_names.append(new_domain_name) <NEW_LINE> swf.client.register_domain( name=new_domain_name, description='domain for ' + new_domain_name, workflowExecutionRetentionPeriodInDays='1' ) <NEW_LINE> <DEDENT> domain_pager = swf.client.get_paginator('list_domains') <NEW_LINE> for page in domain_pager.paginate( registrationStatus='REGISTERED', maximumPageSize=10, reverseOrder=False): <NEW_LINE> <INDENT> if 'domainInfos' in page: <NEW_LINE> <INDENT> for domainInfo in page['domainInfos']: <NEW_LINE> <INDENT> if domainInfo['name'] in created_domain_names: <NEW_LINE> <INDENT> created_domain_names.remove(domainInfo['name']) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> self.assertTrue(len(created_domain_names) <= 0, 'Did not find new domains ' + repr(created_domain_names))
Integration tests for SWF domains
6259907df9cc0f698b1c5ffd
class FloatParamUpdate: <NEW_LINE> <INDENT> def __init__( self, name, value): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.value = value <NEW_LINE> <DEDENT> def __eq__(self, to_compare): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return (self.name == to_compare.name) and (self.value == to_compare.value) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> struct_repr = ", ".join([ "name: " + str(self.name), "value: " + str(self.value) ]) <NEW_LINE> return f"FloatParamUpdate: [{struct_repr}]" <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def translate_from_rpc(rpcFloatParamUpdate): <NEW_LINE> <INDENT> return FloatParamUpdate( rpcFloatParamUpdate.name, rpcFloatParamUpdate.value ) <NEW_LINE> <DEDENT> def translate_to_rpc(self, rpcFloatParamUpdate): <NEW_LINE> <INDENT> rpcFloatParamUpdate.name = self.name <NEW_LINE> rpcFloatParamUpdate.value = self.value
A float param that has been updated. Parameters ---------- name : std::string Name of param that changed value : float New value of param
6259907de1aae11d1e7cf542
class ExpressRouteServiceProvidersOperations(object): <NEW_LINE> <INDENT> models = models <NEW_LINE> def __init__(self, client, config, serializer, deserializer): <NEW_LINE> <INDENT> self._client = client <NEW_LINE> self._serialize = serializer <NEW_LINE> self._deserialize = deserializer <NEW_LINE> self.api_version = "2016-12-01" <NEW_LINE> self.config = config <NEW_LINE> <DEDENT> def list( self, custom_headers=None, raw=False, **operation_config): <NEW_LINE> <INDENT> def internal_paging(next_link=None, raw=False): <NEW_LINE> <INDENT> if not next_link: <NEW_LINE> <INDENT> url = '/subscriptions/{subscriptionId}/providers/Microsoft.Network/expressRouteServiceProviders' <NEW_LINE> path_format_arguments = { 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } <NEW_LINE> url = self._client.format_url(url, **path_format_arguments) <NEW_LINE> query_parameters = {} <NEW_LINE> query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> url = next_link <NEW_LINE> query_parameters = {} <NEW_LINE> <DEDENT> header_parameters = {} <NEW_LINE> header_parameters['Content-Type'] = 'application/json; charset=utf-8' <NEW_LINE> if self.config.generate_client_request_id: <NEW_LINE> <INDENT> header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) <NEW_LINE> <DEDENT> if custom_headers: <NEW_LINE> <INDENT> header_parameters.update(custom_headers) <NEW_LINE> <DEDENT> if self.config.accept_language is not None: <NEW_LINE> <INDENT> header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') <NEW_LINE> <DEDENT> request = self._client.get(url, query_parameters) <NEW_LINE> response = self._client.send( request, header_parameters, stream=False, **operation_config) <NEW_LINE> if response.status_code not in [200]: <NEW_LINE> <INDENT> exp = CloudError(response) <NEW_LINE> exp.request_id = response.headers.get('x-ms-request-id') <NEW_LINE> raise exp <NEW_LINE> <DEDENT> return response <NEW_LINE> <DEDENT> deserialized = models.ExpressRouteServiceProviderPaged(internal_paging, self._deserialize.dependencies) <NEW_LINE> if raw: <NEW_LINE> <INDENT> header_dict = {} <NEW_LINE> client_raw_response = models.ExpressRouteServiceProviderPaged(internal_paging, self._deserialize.dependencies, header_dict) <NEW_LINE> return client_raw_response <NEW_LINE> <DEDENT> return deserialized
ExpressRouteServiceProvidersOperations operations. :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. :ivar api_version: Client API version. Constant value: "2016-12-01".
6259907d97e22403b383c962
class CesePeriodic(periodic): <NEW_LINE> <INDENT> def init(self, **kw): <NEW_LINE> <INDENT> svr = self.svr <NEW_LINE> ngstcell = svr.ngstcell <NEW_LINE> ngstface = svr.ngstface <NEW_LINE> facn = self.facn <NEW_LINE> slctm = self.rclp[:,0] + ngstcell <NEW_LINE> slctr = self.rclp[:,1] + ngstcell <NEW_LINE> shf = svr.cecnd[slctr,0,:] - svr.fccnd[facn[:,2]+ngstface,:] <NEW_LINE> svr.cecnd[slctm,0,:] = svr.fccnd[facn[:,0]+ngstface,:] + shf <NEW_LINE> <DEDENT> def soln(self): <NEW_LINE> <INDENT> svr = self.svr <NEW_LINE> slctm = self.rclp[:,0] + svr.ngstcell <NEW_LINE> slctr = self.rclp[:,1] + svr.ngstcell <NEW_LINE> svr.soln[slctm,:] = svr.soln[slctr,:] <NEW_LINE> <DEDENT> def dsoln(self): <NEW_LINE> <INDENT> svr = self.svr <NEW_LINE> slctm = self.rclp[:,0] + svr.ngstcell <NEW_LINE> slctr = self.rclp[:,1] + svr.ngstcell <NEW_LINE> svr.dsoln[slctm,:,:] = svr.dsoln[slctr,:,:]
General periodic boundary condition for sequential runs.
6259907d1b99ca4002290266
class OpForbidden(Exception): <NEW_LINE> <INDENT> pass
Operation forbidden exception.
6259907d4527f215b58eb6d1
class TestingConfig(Config): <NEW_LINE> <INDENT> DEBUG = True
Configurations for the testing environment.
6259907d167d2b6e312b82c5
class List(Container): <NEW_LINE> <INDENT> def iter_and_match(self, path, val): <NEW_LINE> <INDENT> for i in range(len(val)): <NEW_LINE> <INDENT> self.match_spec(self.spec, "%s[%d]" % (path, i), val[i])
Spec is a Type that is matched against all elements
6259907d7b180e01f3e49d96