Columns: code (string, 4 to 4.48k chars), docstring (string, 1 to 6.45k chars), _id (string, 24 chars)
class TemperatureMonitor(pypot.primitive.LoopPrimitive): <NEW_LINE> <INDENT> def __init__(self, robot, freq=0.5, temp_limit=55, player=None, sound=None): <NEW_LINE> <INDENT> pypot.primitive.LoopPrimitive.__init__(self, robot, freq) <NEW_LINE> self.temp_limit = temp_limit <NEW_LINE> self.sound = sound <NEW_LINE> self.watched_motors = self.robot.legs + self.robot.torso + self.robot.arms <NEW_LINE> if player is not None: <NEW_LINE> <INDENT> self.player = player <NEW_LINE> <DEDENT> elif 'Windows' in platform.system(): <NEW_LINE> <INDENT> self.player = 'wmplayer' <NEW_LINE> <DEDENT> elif 'Darwin' in platform.system(): <NEW_LINE> <INDENT> self.player = 'afplay' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.player = 'aplay' <NEW_LINE> <DEDENT> <DEDENT> def setup(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> self.check_temperature() <NEW_LINE> <DEDENT> def teardown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def check_temperature(self): <NEW_LINE> <INDENT> motor_list = [] <NEW_LINE> for m in self.watched_motors: <NEW_LINE> <INDENT> if m.present_temperature > self.temp_limit: <NEW_LINE> <INDENT> motor_list.append(m) <NEW_LINE> <DEDENT> <DEDENT> if len(motor_list) > 0: <NEW_LINE> <INDENT> self.raise_problem(motor_list) <NEW_LINE> <DEDENT> <DEDENT> def raise_problem(self, motor_list): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> subprocess.call([self.player, self.sound]) <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> logger.warning('Sound player {} cannot be started'.format(self.player)) <NEW_LINE> <DEDENT> for m in motor_list: <NEW_LINE> <INDENT> print('{} overheating: {}'.format(m.name, m.present_temperature))
This primitive raises an alert by playing a sound when the temperature of one motor reaches the "temp_limit". On GNU/Linux you can use "aplay" as the player. On MacOS ("Darwin") you can use "afplay" as the player. On Windows Vista+, you may be able to use "wmplayer".
625990582ae34c7f260ac68a
class Quad: <NEW_LINE> <INDENT> def __init__(self, x0: Union[int, float], y0: Union[int, float], x1: Union[int, float], y1: Union[int, float], x2: Union[int, float], y2: Union[int, float], x3: Union[int, float], y3: Union[int, float]): <NEW_LINE> <INDENT> self.x0 = x0 <NEW_LINE> self.y0 = y0 <NEW_LINE> self.x1 = x1 <NEW_LINE> self.y1 = y1 <NEW_LINE> self.x2 = x2 <NEW_LINE> self.y2 = y2 <NEW_LINE> self.x3 = x3 <NEW_LINE> self.y3 = y3 <NEW_LINE> <DEDENT> def as_tuple(self) -> Tuple: <NEW_LINE> <INDENT> return self.x0, self.y0, self.x1, self.y1, self.x2, self.y2, self.x3, self.y3 <NEW_LINE> <DEDENT> def as_list(self) -> List[Union[int, float]]: <NEW_LINE> <INDENT> return list(self.as_tuple()) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_corner(cls, grid: np.ndarray, i: int, j: int): <NEW_LINE> <INDENT> return cls(grid[i, j, 0], grid[i, j, 1], grid[i + 1, j, 0], grid[i + 1, j, 1], grid[i + 1, j + 1, 0], grid[i + 1, j + 1, 1], grid[i, j + 1, 0], grid[i, j + 1, 1])
(x_0, y_0) --------- (x_3, y_3)
    .                   .
    .                   .
    .                   .
    .                   .
(x_1, y_1) ---------- (x_2, y_2)
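A minimal usage sketch for the Quad class above, assuming Quad is importable and that the corner grid is a numpy array of shape (rows, cols, 2) as implied by from_corner; the grid values are illustrative only.

import numpy as np

# Hypothetical 2x2 grid of (x, y) points, shape (2, 2, 2)
grid = np.array([[[0.0, 0.0], [0.0, 1.0]],
                 [[1.0, 0.0], [1.0, 1.0]]])

# Build the quad whose first corner sits at grid cell (0, 0)
q = Quad.from_corner(grid, 0, 0)
print(q.as_tuple())   # (0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0)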
62599058498bea3a75a590c9
class Queue(object): <NEW_LINE> <INDENT> def __init__(self, config, queue): <NEW_LINE> <INDENT> self.connection = pika.BlockingConnection( pika.URLParameters(config['queue_url'])) <NEW_LINE> self.config = config <NEW_LINE> self.channel = self.connection.channel() <NEW_LINE> self.channel.queue_declare(queue=queue) <NEW_LINE> self.queue = queue <NEW_LINE> <DEDENT> def publish(self, record): <NEW_LINE> <INDENT> self.channel.basic_publish( exchange='', routing_key=self.queue, body=json.dumps(record)) <NEW_LINE> <DEDENT> def push_records(self, records): <NEW_LINE> <INDENT> for record in records: <NEW_LINE> <INDENT> self.publish(record) <NEW_LINE> <DEDENT> <DEDENT> def consume(self, callback, params): <NEW_LINE> <INDENT> def _callback(channel, method, properties, body): <NEW_LINE> <INDENT> record = json.loads(body) <NEW_LINE> callback(record, params) <NEW_LINE> channel.basic_ack(delivery_tag=method.delivery_tag) <NEW_LINE> <DEDENT> self.channel.basic_qos(prefetch_count=self.config['pull_batch_size']) <NEW_LINE> self.channel.basic_consume(_callback, queue=self.queue) <NEW_LINE> self.channel.start_consuming()
Wrapper class for queue
6259905832920d7e50bc75e9
class UConverter(object): <NEW_LINE> <INDENT> default_encodings = ['utf-8', 'ascii', 'utf-16'] <NEW_LINE> def __init__(self, hint_encodings=None): <NEW_LINE> <INDENT> if hint_encodings: <NEW_LINE> <INDENT> self.encodings = hint_encodings <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.encodings = self.default_encodings[:] <NEW_LINE> <DEDENT> <DEDENT> def __call__(self, value): <NEW_LINE> <INDENT> if isinstance(value, six.text_type): <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> if not isinstance(value, six.binary_type): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> value = six.text_type(value) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> value = six.binary_type(value) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> raise UnicodeError('unable to convert to unicode %r' '' % (value,)) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> <DEDENT> for ln in self.encodings: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> res = six.text_type(value, ln) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return res <NEW_LINE> <DEDENT> <DEDENT> raise UnicodeError('unable to convert to unicode %r' % (value,))
Simple converter to unicode. Create an instance with a specified list of encodings to be tried when converting a value to unicode.

Example::

    ustr = UConverter(['utf-8', 'cp-1251'])
    my_unicode_str = ustr(b'hello - привет')
625990580a50d4780f706890
class VisitScheduleMethodsModelMixin(models.Model): <NEW_LINE> <INDENT> @property <NEW_LINE> def visit(self) -> Visit: <NEW_LINE> <INDENT> return self.visit_from_schedule <NEW_LINE> <DEDENT> @property <NEW_LINE> def visit_from_schedule(self: VisitScheduleStub) -> Visit: <NEW_LINE> <INDENT> visit = self.schedule.visits.get(self.visit_code) <NEW_LINE> if not visit: <NEW_LINE> <INDENT> raise VisitScheduleModelMixinError( f"Visit not found in schedule. Expected one of {self.schedule.visits}. " f"Got {self.visit_code}." ) <NEW_LINE> <DEDENT> return visit <NEW_LINE> <DEDENT> @property <NEW_LINE> def visits(self: VisitScheduleStub) -> VisitCollection: <NEW_LINE> <INDENT> return self.schedule.visits <NEW_LINE> <DEDENT> @property <NEW_LINE> def schedule(self: VisitScheduleStub) -> Schedule: <NEW_LINE> <INDENT> return self.visit_schedule.schedules.get(self.schedule_name) <NEW_LINE> <DEDENT> @property <NEW_LINE> def visit_schedule(self: VisitScheduleStub) -> VisitSchedule: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> visit_schedule_name, _ = self._meta.visit_schedule_name.split(".") <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> visit_schedule_name = self._meta.visit_schedule_name <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> visit_schedule_name = self.visit_schedule_name <NEW_LINE> <DEDENT> return site_visit_schedules.get_visit_schedule(visit_schedule_name=visit_schedule_name) <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> abstract = True
A model mixin that adds methods used to work with the visit schedule. Declare together with VisitScheduleFieldsModelMixin, or with the fields from VisitScheduleFieldsModelMixin.
625990587d847024c075d97f
class RemoveRoleFromInstanceProfileInputSet(InputSet): <NEW_LINE> <INDENT> def set_AWSAccessKeyId(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'AWSAccessKeyId', value) <NEW_LINE> <DEDENT> def set_AWSSecretKeyId(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'AWSSecretKeyId', value) <NEW_LINE> <DEDENT> def set_InstanceProfileName(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'InstanceProfileName', value) <NEW_LINE> <DEDENT> def set_ResponseFormat(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'ResponseFormat', value) <NEW_LINE> <DEDENT> def set_RoleName(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'RoleName', value)
An InputSet with methods appropriate for specifying the inputs to the RemoveRoleFromInstanceProfile Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
62599058d99f1b3c44d06c44
class SearchForm(Form): <NEW_LINE> <INDENT> query = StringField()
docstring for SearchForm
625990588e71fb1e983bd06d
class InlineResponse20011D(object): <NEW_LINE> <INDENT> def __init__(self, results=None): <NEW_LINE> <INDENT> self.swagger_types = { 'results': 'list[Project]' } <NEW_LINE> self.attribute_map = { 'results': 'results' } <NEW_LINE> self._results = results <NEW_LINE> <DEDENT> @property <NEW_LINE> def results(self): <NEW_LINE> <INDENT> return self._results <NEW_LINE> <DEDENT> @results.setter <NEW_LINE> def results(self, results): <NEW_LINE> <INDENT> self._results = results <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
62599058d7e4931a7ef3d623
class decoratorbase(object): <NEW_LINE> <INDENT> def __init__(self, *iargs, **ikwargs): <NEW_LINE> <INDENT> self._iargs = iargs <NEW_LINE> self._ikwargs = ikwargs <NEW_LINE> self._cargs = None <NEW_LINE> self._fargs = None <NEW_LINE> self._fkwargs = None <NEW_LINE> if len(iargs) > 0: <NEW_LINE> <INDENT> raise ImportError('This decorator requires parameters, like this: @num_retries()') <NEW_LINE> <DEDENT> if _DEBUG_DECORATOR: <NEW_LINE> <INDENT> sys.stderr.write('INIT phase: %s %s\n' % (repr(self._iargs),repr(self._ikwargs))) <NEW_LINE> <DEDENT> <DEDENT> def __call__(self, *cargs): <NEW_LINE> <INDENT> self._cargs = cargs <NEW_LINE> if _DEBUG_DECORATOR: <NEW_LINE> <INDENT> sys.stderr.write('SETUP phase %s %s %s %s %s\n' % (repr(self._iargs),repr(self._ikwargs),repr(self._cargs),repr(self._fargs),repr(self._fkwargs))) <NEW_LINE> <DEDENT> def wrapped_f(*fargs,**fkwargs): <NEW_LINE> <INDENT> self._fargs=fargs <NEW_LINE> self._fkwargs=fkwargs <NEW_LINE> if _DEBUG_DECORATOR: <NEW_LINE> <INDENT> sys.stderr.write('WRAP phase: _iargs=%s | _ikwargs=%s | _cargs=%s | _fargs=%s | _fkwargs=%s\n' % (repr(self._iargs),repr(self._ikwargs),repr(self._cargs),repr(self._fargs),repr(self._fkwargs))) <NEW_LINE> <DEDENT> if len(self._cargs) >= 1: <NEW_LINE> <INDENT> if _DEBUG_DECORATOR: <NEW_LINE> <INDENT> sys.stderr.write('RUN phase: start.\n') <NEW_LINE> <DEDENT> oRet=self.runphase() <NEW_LINE> if _DEBUG_DECORATOR: <NEW_LINE> <INDENT> sys.stderr.write('RUN phase: done.\n') <NEW_LINE> <DEDENT> return oRet <NEW_LINE> <DEDENT> <DEDENT> return wrapped_f <NEW_LINE> <DEDENT> def runphase(self, *cargs): <NEW_LINE> <INDENT> oFallback = _runfallback(*self._cargs, **self._ikwargs) <NEW_LINE> if len(self._fargs) > 0: <NEW_LINE> <INDENT> oFallback.oCallSelf = self._fargs[0] <NEW_LINE> oFallback.tFuncArgs = self._fargs[1:] if len(self._fargs) > 1 else [] <NEW_LINE> <DEDENT> if len(self._fkwargs) > 0: <NEW_LINE> <INDENT> oFallback.tFuncKwArgs = self._fkwargs <NEW_LINE> <DEDENT> return oFallback.execute(oFallback.oCallSelf)
Decorator base. Works for functions as well as methods. Parentheses are mandatory, like this: @num_retries(). You can use any parameters of the constructor in the linked object, like this: @num_retries(max_tries=4, retry_seconds=[1.0, 2.0], exc_types=[TimeoutError])
6259905821bff66bcd724208
class XenElf64CoreDump(addrspace.PagedReader): <NEW_LINE> <INDENT> order = 30 <NEW_LINE> __name = "xenelf64" <NEW_LINE> __image = True <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(XenElf64CoreDump, self).__init__(**kwargs) <NEW_LINE> self.check_file() <NEW_LINE> self.offset = 0 <NEW_LINE> self.fname = '' <NEW_LINE> self._metadata = {} <NEW_LINE> self.elf_profile = elf.ELFProfile(session=self.session) <NEW_LINE> self.elf64_hdr = self.elf_profile.elf64_hdr(vm=self.base, offset=0) <NEW_LINE> self.as_assert(self.elf64_hdr.e_type == "ET_CORE", "Elf file is not a core file.") <NEW_LINE> xen_note = self.elf64_hdr.section_by_name(".note.Xen") <NEW_LINE> self.as_assert(xen_note, "Image does not contain Xen note.") <NEW_LINE> self.name = "%s|%s" % (self.__class__.__name__, self.base.name) <NEW_LINE> self.runs = self.build_runs() <NEW_LINE> <DEDENT> def build_runs(self): <NEW_LINE> <INDENT> pages = self.elf64_hdr.section_by_name(".xen_pages") <NEW_LINE> self.pages_offset = pages.sh_offset.v() <NEW_LINE> self.as_assert(pages, "Image does not contain Xen pages.") <NEW_LINE> pfn_map = self.elf64_hdr.section_by_name(".xen_pfn") <NEW_LINE> self.max_pfn = 0 <NEW_LINE> runs = {} <NEW_LINE> if pfn_map: <NEW_LINE> <INDENT> pfn_map_data = self.base.read(pfn_map.sh_offset, pfn_map.sh_size) <NEW_LINE> for i, pfn in enumerate( struct.unpack("Q" * (len(pfn_map_data) // 8 ), pfn_map_data)): <NEW_LINE> <INDENT> self.session.report_progress( "Adding run %s to PFN %08x", i, pfn) <NEW_LINE> runs[pfn] = i <NEW_LINE> self.max_pfn = max(self.max_pfn, pfn) <NEW_LINE> <DEDENT> <DEDENT> return runs <NEW_LINE> <DEDENT> def vtop(self, vaddr): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return (self.runs[vaddr // self.PAGE_SIZE] * self.PAGE_SIZE + self.pages_offset + vaddr % self.PAGE_SIZE) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def check_file(self): <NEW_LINE> <INDENT> self.as_assert(self.base, "Must stack on another address space") <NEW_LINE> self.as_assert((self.base.read(0, 4) == b"\177ELF"), "Header signature invalid") <NEW_LINE> <DEDENT> def get_mappings(self, start=0, end=2**64): <NEW_LINE> <INDENT> for run_pfn in sorted(self.runs): <NEW_LINE> <INDENT> start = run_pfn * self.PAGE_SIZE <NEW_LINE> run = addrspace.Run(start=start, end=start + self.PAGE_SIZE, file_offset=self.vtop(start), address_space=self.base) <NEW_LINE> yield run <NEW_LINE> <DEDENT> <DEDENT> def end(self): <NEW_LINE> <INDENT> return self.max_pfn
An Address space to support XEN memory dumps. https://xenbits.xen.org/docs/4.8-testing/misc/dump-core-format.txt
6259905845492302aabfda7c
class VarComp(Term): <NEW_LINE> <INDENT> def __init__(self, name, values, prior=None, metadata=None): <NEW_LINE> <INDENT> super(VarComp, self).__init__(name, values, categorical=True, prior=prior, metadata=metadata) <NEW_LINE> self.index_vec = self.dummies_to_vec(values) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def dummies_to_vec(dummies): <NEW_LINE> <INDENT> vec = np.zeros(len(dummies), dtype=int) <NEW_LINE> for i in range(dummies.shape[1]): <NEW_LINE> <INDENT> vec[(dummies[:, i] == 1)] = i + 1 <NEW_LINE> <DEDENT> return vec
Represents a variance component/random effect.

Parameters
----------
name : str
    The name of the variance component.
values : iterable
    A 2d binary array identifying the observations that belong to the levels
    of the variance component. Has dimension n x k, where n is the number of
    observed rows in the dataset and k is the number of levels in the factor.
prior : dict
    Optional specification of the prior distribution for the VarComp.
metadata : dict
    Arbitrary metadata to store internally.
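A small worked example of the dummies_to_vec helper defined above, assuming the VarComp class and numpy are importable; the dummy matrix is made up for illustration.

import numpy as np

# Three observations, two levels: rows 0 and 2 belong to level 1, row 1 to level 2
dummies = np.array([[1, 0],
                    [0, 1],
                    [1, 0]])

print(VarComp.dummies_to_vec(dummies))   # [1 2 1]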
6259905873bcbd0ca4bcb837
@base.ReleaseTracks(base.ReleaseTrack.ALPHA, base.ReleaseTrack.BETA) <NEW_LINE> class ListBeta(ListGA): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def Args(parser): <NEW_LINE> <INDENT> AddFlags(parser, False)
List Google Compute Engine operations.
625990582ae34c7f260ac68b
class StudentTestAdd(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.c = Client() <NEW_LINE> <DEDENT> def test_create(self): <NEW_LINE> <INDENT> response = self.c.post('/opencmis/student/add/', { 'status': 14, 'title': 1, 'first_name': 'Jacob', 'last_name': 'Green', 'date_of_birth': '12/03/1996', 'gender': 'U', 'ethnicity': 31, } ) <NEW_LINE> self.assertEqual(response.status_code, 200)
Test to see if we can add a student via simulated form post
625990584e4d5625663739ab
class ClimbHab2(CommandGroup): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__("ClimbHab2") <NEW_LINE> self.logger = logging.getLogger("ClimbHab2") <NEW_LINE> self.addParallel(ChassisStop()) <NEW_LINE> self.addSequential(LiftSet(Position.CLIMB2)) <NEW_LINE> self.addSequential(LiftDrive2(0.5, 2)) <NEW_LINE> self.addSequential(LiftSet(Position.LBACK2)) <NEW_LINE> self.addSequential(ChassisDrive(0.3, 0.0, 2)) <NEW_LINE> self.addParallel(ChassisStop()) <NEW_LINE> self.addSequential(LiftSet(Position.FLUSH)) <NEW_LINE> self.addSequential(ChassisDrive(0.3, 0.0, 2)) <NEW_LINE> <DEDENT> def initialize(self): <NEW_LINE> <INDENT> self.logger.warning("Starting to climb Hab 2") <NEW_LINE> <DEDENT> def execute(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def isFinished(self): <NEW_LINE> <INDENT> return not any(entry.command.isRunning() for entry in self.commands) <NEW_LINE> <DEDENT> def interrupted(self): <NEW_LINE> <INDENT> self.end() <NEW_LINE> <DEDENT> def end(self): <NEW_LINE> <INDENT> self.logger.warning("Finished climbing Hab 2")
CommandGroup for climbing Hab 2
6259905816aa5153ce401a88
@dataclass <NEW_LINE> class P157IsAtRestRelativeTo: <NEW_LINE> <INDENT> URI = "http://erlangen-crm.org/current/P157_is_at_rest_relative_to"
Scope note:
This property associates an instance of E53 Place with the instance of E18 Physical Thing that determines a reference space for this instance of E53 Place by being at rest with respect to this reference space. The relative stability of form of an instance of E18 Physical Thing defines its default reference space. The reference space is not spatially limited to the referred thing. For example, a ship determines a reference space in terms of which other ships in its neighbourhood may be described. Larger constellations of matter, such as continental plates, may comprise many physical features that are at rest with them and define the same reference space.

Examples:
- The spatial extent of the municipality of Athens in 2014 (E53) is at rest relative to The Royal Observatory in Greenwich (E25)
- The place where Lord Nelson died on H.M.S. Victory (E53) is at rest relative to H.M.S. Victory (E22)

In First Order Logic:
P157(x,y) ⊃ E53(x)
P157(x,y) ⊃ E18(y)
6259905891f36d47f2231962
class NL_TSTL_TakensEstimator(HCTSASuper): <NEW_LINE> <INDENT> KNOWN_OUTPUTS_SIZES = (1,) <NEW_LINE> TAGS = ('crptool', 'dimension', 'nonlinear', 'scaling', 'takens', 'tstool') <NEW_LINE> def __init__(self, Nref=-1.0, rad=0.05, past=0.05, embedParams=('mi', 3), randomSeed=None): <NEW_LINE> <INDENT> super(NL_TSTL_TakensEstimator, self).__init__() <NEW_LINE> self.Nref = Nref <NEW_LINE> self.rad = rad <NEW_LINE> self.past = past <NEW_LINE> self.embedParams = embedParams <NEW_LINE> self.randomSeed = randomSeed <NEW_LINE> <DEDENT> def _eval_hook(self, eng, x): <NEW_LINE> <INDENT> if self.Nref is None: <NEW_LINE> <INDENT> return eng.run_function(1, 'NL_TSTL_TakensEstimator', x, ) <NEW_LINE> <DEDENT> elif self.rad is None: <NEW_LINE> <INDENT> return eng.run_function(1, 'NL_TSTL_TakensEstimator', x, self.Nref) <NEW_LINE> <DEDENT> elif self.past is None: <NEW_LINE> <INDENT> return eng.run_function(1, 'NL_TSTL_TakensEstimator', x, self.Nref, self.rad) <NEW_LINE> <DEDENT> elif self.embedParams is None: <NEW_LINE> <INDENT> return eng.run_function(1, 'NL_TSTL_TakensEstimator', x, self.Nref, self.rad, self.past) <NEW_LINE> <DEDENT> elif self.randomSeed is None: <NEW_LINE> <INDENT> return eng.run_function(1, 'NL_TSTL_TakensEstimator', x, self.Nref, self.rad, self.past, self.embedParams) <NEW_LINE> <DEDENT> return eng.run_function(1, 'NL_TSTL_TakensEstimator', x, self.Nref, self.rad, self.past, self.embedParams, self.randomSeed)
Matlab doc:
----------------------------------------
%
% cf. "Detecting strange attractors in turbulence", F. Takens.
% Lect. Notes Math. 898 p366 (1981)
%
%---INPUTS:
% y, the input time series
% Nref, the number of reference points (can be -1 to use all points)
% rad, the maximum search radius (as a proportion of the attractor size)
% past, the Theiler window
% embedParams, the embedding parameters for BF_embed, in the form {tau,m}
%
%---OUTPUT: the Taken's estimator of the correlation dimension, d2.
%
% Uses the TSTOOL code, takens_estimator.
% TSTOOL: http://www.physik3.gwdg.de/tstool/
----------------------------------------
6259905824f1403a926863a1
class NewAccountRequest(namedtuple('NewAccountRequest', [ 'user_name', 'real_name', 'is_group', 'calnet_uid', 'callink_oid', 'email', 'encrypted_password', 'handle_warnings', ])): <NEW_LINE> <INDENT> WARNINGS_WARN = 'warn' <NEW_LINE> WARNINGS_SUBMIT = 'submit' <NEW_LINE> WARNINGS_CREATE = 'create' <NEW_LINE> def to_dict(self): <NEW_LINE> <INDENT> return { field: getattr(self, field) for field in self._fields if field != 'encrypted_password' }
Request for account creation.

:param user_name:
:param real_name:
:param is_group:
:param calnet_uid: uid (or None)
:param callink_oid: oid (or None)
:param email:
:param encrypted_password:
:param handle_warnings: one of WARNINGS_WARN, WARNINGS_SUBMIT, WARNINGS_CREATE
    WARNINGS_WARN: don't create account, return warnings
    WARNINGS_SUBMIT: don't create account, submit for staff approval
    WARNINGS_CREATE: create the account anyway
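A minimal construction sketch for the NewAccountRequest namedtuple above; the field values are invented for illustration, and to_dict() drops encrypted_password exactly as defined in the class.

req = NewAccountRequest(
    user_name='jdoe',
    real_name='Jane Doe',
    is_group=False,
    calnet_uid=None,
    callink_oid=None,
    email='jdoe@example.com',
    encrypted_password=b'<encrypted blob>',   # placeholder value
    handle_warnings=NewAccountRequest.WARNINGS_WARN,
)
print(req.to_dict())   # every field except encrypted_password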
62599058004d5f362081fabf
class JobStartedEvent(Event): <NEW_LINE> <INDENT> pass
event
62599058b5575c28eb71379e
class UniversalCodeCounter(list): <NEW_LINE> <INDENT> def __init__(self, parsed_code, filename): <NEW_LINE> <INDENT> self.NLOC = 0 <NEW_LINE> self.current_function = None <NEW_LINE> self.filename = filename <NEW_LINE> self.functionInfos = [] <NEW_LINE> for fun in self._functions(parsed_code.process()): <NEW_LINE> <INDENT> self.append(fun) <NEW_LINE> <DEDENT> self.LOC = parsed_code.get_current_line() <NEW_LINE> self._summarize() <NEW_LINE> <DEDENT> def countCode(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def START_NEW_FUNCTION(self, name_and_line): <NEW_LINE> <INDENT> self.current_function = FunctionInfo(*name_and_line) <NEW_LINE> <DEDENT> def CONDITION(self, token): <NEW_LINE> <INDENT> self.TOKEN(token) <NEW_LINE> self.current_function.add_condition() <NEW_LINE> <DEDENT> def TOKEN(self, text): <NEW_LINE> <INDENT> self.current_function.add_token() <NEW_LINE> <DEDENT> def NEW_LINE(self, token): <NEW_LINE> <INDENT> self.NLOC += 1 <NEW_LINE> if self.current_function is not None: <NEW_LINE> <INDENT> self.current_function.add_non_comment_line() <NEW_LINE> <DEDENT> <DEDENT> def ADD_TO_LONG_FUNCTION_NAME(self, app): <NEW_LINE> <INDENT> self.current_function.add_to_long_name(app) <NEW_LINE> <DEDENT> def ADD_TO_FUNCTION_NAME(self, app): <NEW_LINE> <INDENT> self.current_function.add_to_function_name(app) <NEW_LINE> <DEDENT> def PARAMETER(self, token): <NEW_LINE> <INDENT> self.current_function.add_parameter(token) <NEW_LINE> self.ADD_TO_LONG_FUNCTION_NAME(" " + token) <NEW_LINE> <DEDENT> END_OF_FUNCTION = 1 <NEW_LINE> def _functions(self, parsed_code): <NEW_LINE> <INDENT> for code, text in parsed_code: <NEW_LINE> <INDENT> if code == UniversalCodeCounter.END_OF_FUNCTION: <NEW_LINE> <INDENT> yield self.current_function <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> code(self, text) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _summarize(self): <NEW_LINE> <INDENT> self.average_NLOC = 0 <NEW_LINE> self.average_CCN = 0 <NEW_LINE> self.average_token = 0 <NEW_LINE> nloc = 0 <NEW_LINE> ccn = 0 <NEW_LINE> token = 0 <NEW_LINE> for fun in self: <NEW_LINE> <INDENT> nloc += fun.NLOC <NEW_LINE> ccn += fun.cyclomatic_complexity <NEW_LINE> token += fun.token_count <NEW_LINE> <DEDENT> fc = len(self) <NEW_LINE> if fc > 0: <NEW_LINE> <INDENT> self.average_NLOC = nloc / fc <NEW_LINE> self.average_CCN = ccn / fc <NEW_LINE> self.average_token = token / fc <NEW_LINE> <DEDENT> self.NLOC = nloc <NEW_LINE> self.CCN = ccn <NEW_LINE> self.token = token
UniversalCode is the code that is unrelated to any programming language. The code could be:
    START_NEW_FUNCTION
    ADD_TO_FUNCTION_NAME
    ADD_TO_LONG_FUNCTION_NAME
    PARAMETER
    CONDITION
    TOKEN
    END_OF_FUNCTION
A TokenTranslator will generate UniversalCode.
625990586e29344779b01bf0
class AssertDesignspaceRoundtrip(object): <NEW_LINE> <INDENT> def assertDesignspacesEqual(self, expected, actual, message=''): <NEW_LINE> <INDENT> directory = tempfile.mkdtemp() <NEW_LINE> def git(*args): <NEW_LINE> <INDENT> return subprocess.check_output(["git", "-C", directory] + list(args)) <NEW_LINE> <DEDENT> def clean_git_folder(): <NEW_LINE> <INDENT> with os.scandir(directory) as entries: <NEW_LINE> <INDENT> for entry in entries: <NEW_LINE> <INDENT> if entry.is_file() or entry.is_symlink(): <NEW_LINE> <INDENT> os.remove(entry.path) <NEW_LINE> <DEDENT> elif entry.is_dir() and entry.name != ".git": <NEW_LINE> <INDENT> shutil.rmtree(entry.path) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> designspace_filename = os.path.join(directory, 'test.designspace') <NEW_LINE> git("init") <NEW_LINE> write_designspace_and_UFOs(expected, designspace_filename) <NEW_LINE> for source in expected.sources: <NEW_LINE> <INDENT> normalize_ufo_lib(source.path) <NEW_LINE> normalizeUFO(source.path, floatPrecision=3, writeModTimes=False) <NEW_LINE> <DEDENT> git("add", ".") <NEW_LINE> git("commit", "-m", "expected") <NEW_LINE> clean_git_folder() <NEW_LINE> write_designspace_and_UFOs(actual, designspace_filename) <NEW_LINE> for source in actual.sources: <NEW_LINE> <INDENT> normalize_ufo_lib(source.path) <NEW_LINE> normalizeUFO(source.path, floatPrecision=3, writeModTimes=False) <NEW_LINE> <DEDENT> git("add", ".") <NEW_LINE> status = git("status") <NEW_LINE> diff = git("diff", "--staged", "--src-prefix= original/", "--dst-prefix=roundtrip/") <NEW_LINE> if diff: <NEW_LINE> <INDENT> sys.stderr.write(status) <NEW_LINE> sys.stderr.write(diff) <NEW_LINE> <DEDENT> self.assertEqual(0, len(diff), message) <NEW_LINE> <DEDENT> def assertDesignspaceRoundtrip(self, designspace): <NEW_LINE> <INDENT> directory = tempfile.mkdtemp() <NEW_LINE> font = to_glyphs(designspace, minimize_ufo_diffs=True) <NEW_LINE> roundtrip_in_mem = to_designspace(font, propagate_anchors=False) <NEW_LINE> tmpfont_path = os.path.join(directory, 'font.glyphs') <NEW_LINE> font.save(tmpfont_path) <NEW_LINE> font_rt = classes.GSFont(tmpfont_path) <NEW_LINE> roundtrip = to_designspace(font_rt, propagate_anchors=False) <NEW_LINE> font.save('intermediary.glyphs') <NEW_LINE> write_designspace_and_UFOs(designspace, 'expected/test.designspace') <NEW_LINE> for source in designspace.sources: <NEW_LINE> <INDENT> normalize_ufo_lib(source.path) <NEW_LINE> normalizeUFO(source.path, floatPrecision=3, writeModTimes=False) <NEW_LINE> <DEDENT> write_designspace_and_UFOs(roundtrip, 'actual/test.designspace') <NEW_LINE> for source in roundtrip.sources: <NEW_LINE> <INDENT> normalize_ufo_lib(source.path) <NEW_LINE> normalizeUFO(source.path, floatPrecision=3, writeModTimes=False) <NEW_LINE> <DEDENT> self.assertDesignspacesEqual( roundtrip_in_mem, roundtrip, "The round-trip in memory or written to disk should be equivalent") <NEW_LINE> self.assertDesignspacesEqual( designspace, roundtrip, "The font should not be modified by the roundtrip")
Check UFOs + designspace -> .glyphs -> UFOs + designspace
6259905801c39578d7f14209
class BaseOptions(object): <NEW_LINE> <INDENT> def __init__(self, **kwds): <NEW_LINE> <INDENT> self.debug = False <NEW_LINE> self.log_level = logging.WARN <NEW_LINE> self.service_name = determine_service_name() <NEW_LINE> self.extra_http_headers = None <NEW_LINE> if "INSTANA_DEBUG" in os.environ: <NEW_LINE> <INDENT> self.log_level = logging.DEBUG <NEW_LINE> self.debug = True <NEW_LINE> <DEDENT> if "INSTANA_EXTRA_HTTP_HEADERS" in os.environ: <NEW_LINE> <INDENT> self.extra_http_headers = str(os.environ["INSTANA_EXTRA_HTTP_HEADERS"]).lower().split(';') <NEW_LINE> <DEDENT> self.secrets_matcher = 'contains-ignore-case' <NEW_LINE> self.secrets_list = ['key', 'pass', 'secret'] <NEW_LINE> self.secrets = os.environ.get("INSTANA_SECRETS", None) <NEW_LINE> if self.secrets is not None: <NEW_LINE> <INDENT> parts = self.secrets.split(':') <NEW_LINE> if len(parts) == 2: <NEW_LINE> <INDENT> self.secrets_matcher = parts[0] <NEW_LINE> self.secrets_list = parts[1].split(',') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger.warning("Couldn't parse INSTANA_SECRETS env var: %s", self.secrets) <NEW_LINE> <DEDENT> <DEDENT> self.__dict__.update(kwds)
Base class for all option classes. Holds items common to all of them.
62599058d6c5a102081e36c5
class RegistryNameCheckRequest(Model): <NEW_LINE> <INDENT> _validation = { 'name': {'required': True, 'max_length': 50, 'min_length': 5, 'pattern': '^[a-zA-Z0-9]*$'}, 'type': {'required': True, 'constant': True}, } <NEW_LINE> _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, } <NEW_LINE> type = "Microsoft.ContainerRegistry/registries" <NEW_LINE> def __init__(self, name): <NEW_LINE> <INDENT> self.name = name
A request to check whether a container registry name is available.

Variables are only populated by the server, and will be ignored when sending a request.

:param name: The name of the container registry.
:type name: str
:ivar type: The resource type of the container registry. This field must be set to 'Microsoft.ContainerRegistry/registries'. Default value: "Microsoft.ContainerRegistry/registries".
:vartype type: str
62599058be8e80087fbc0628
class GdalHandlesCollection(object): <NEW_LINE> <INDENT> def __init__(self, inRats, outRats): <NEW_LINE> <INDENT> self.gdalHandlesDict = {} <NEW_LINE> self.inputRatList = [] <NEW_LINE> for ratHandleName in outRats.getRatList(): <NEW_LINE> <INDENT> ratHandle = getattr(outRats, ratHandleName) <NEW_LINE> if ratHandle not in self.gdalHandlesDict: <NEW_LINE> <INDENT> sharedDS = self.checkExistingDS(ratHandle) <NEW_LINE> self.gdalHandlesDict[ratHandle] = GdalHandles(ratHandle, update=True, sharedDS=sharedDS) <NEW_LINE> <DEDENT> <DEDENT> for ratHandleName in inRats.getRatList(): <NEW_LINE> <INDENT> ratHandle = getattr(inRats, ratHandleName) <NEW_LINE> if ratHandle not in self.gdalHandlesDict: <NEW_LINE> <INDENT> sharedDS = self.checkExistingDS(ratHandle) <NEW_LINE> self.gdalHandlesDict[ratHandle] = GdalHandles(ratHandle, update=False, sharedDS=sharedDS) <NEW_LINE> <DEDENT> self.inputRatList.append(ratHandle) <NEW_LINE> <DEDENT> <DEDENT> def getRowCount(self): <NEW_LINE> <INDENT> if len(self.inputRatList) > 0: <NEW_LINE> <INDENT> firstRatHandle = self.inputRatList[0] <NEW_LINE> gdalHandles = self.gdalHandlesDict[firstRatHandle] <NEW_LINE> rowCount = gdalHandles.gdalRat.GetRowCount() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> rowCount = None <NEW_LINE> <DEDENT> return rowCount <NEW_LINE> <DEDENT> def checkConsistency(self): <NEW_LINE> <INDENT> rowCountList = [] <NEW_LINE> for ratHandle in self.inputRatList: <NEW_LINE> <INDENT> rowCount = self.gdalHandlesDict[ratHandle].gdalRat.GetRowCount() <NEW_LINE> filename = ratHandle.filename <NEW_LINE> rowCountList.append((filename, rowCount)) <NEW_LINE> <DEDENT> countList = [c for (f, c) in rowCountList] <NEW_LINE> allSame = all([(c == countList[0]) for c in countList]) <NEW_LINE> if not allSame: <NEW_LINE> <INDENT> msg = "RAT length mismatch\n%s\n" % '\n'.join( ["File: %s, rowcount:%s"%(fn, rc) for (fn, rc) in rowCountList]) <NEW_LINE> raise rioserrors.RatMismatchError(msg) <NEW_LINE> <DEDENT> <DEDENT> def checkExistingDS(self, ratHandle): <NEW_LINE> <INDENT> sharedDS = None <NEW_LINE> for existingRatHandle in self.gdalHandlesDict: <NEW_LINE> <INDENT> if existingRatHandle.filename == ratHandle.filename: <NEW_LINE> <INDENT> sharedDS = self.gdalHandlesDict[existingRatHandle].ds <NEW_LINE> <DEDENT> <DEDENT> return sharedDS
A set of all the GdalHandles objects
62599058e5267d203ee6ce93
class DeviceCombinationRow(DatabaseRow): <NEW_LINE> <INDENT> TABLE_NAME = "DEVICECOMBINATION" <NEW_LINE> FIELDS = { "DeviceCombinationPK": PrimaryKey(int, auto_increment=True), "RuleID": int, "SensorID": str, "SensorGroupID": int, "DeviceID": str, "DeviceGroupID": int, }
Row schema for the DEVICECOMBINATION table.
62599058507cdc57c63a634b
class EuroMapping(IdentityMapping): <NEW_LINE> <INDENT> def __init__(self, from_field=None, default=0): <NEW_LINE> <INDENT> self.default = default <NEW_LINE> super(EuroMapping, self).__init__(from_field) <NEW_LINE> <DEDENT> def map_value(self, old_value): <NEW_LINE> <INDENT> old_value = super(EuroMapping, self).map_value(old_value) <NEW_LINE> if not old_value: <NEW_LINE> <INDENT> old_value = self.default <NEW_LINE> <DEDENT> return "%01.2f" % (old_value)
Return an amount with default 0
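A brief sketch of how EuroMapping behaves, assuming IdentityMapping.map_value (not shown here) simply returns the value unchanged; the inputs are illustrative.

m = EuroMapping(default=0)
print(m.map_value(12.5))   # "12.50"
print(m.map_value(None))   # "0.00" - falsy values fall back to the default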
62599058f7d966606f74938b
class MapNav(TileDir): <NEW_LINE> <INDENT> format,ext,input,output='mapnav','.mapnav',True,True <NEW_LINE> dir_pattern='Z[0-9]*/*/*.pic' <NEW_LINE> forced_ext = '.pic' <NEW_LINE> tile_class = FileTileNoExt <NEW_LINE> def path2coord(self,tile_path): <NEW_LINE> <INDENT> z,y,x=path2list(tile_path)[-4:-1] <NEW_LINE> return list(map(int,(z[1:],x,y))) <NEW_LINE> <DEDENT> def coord2path(self,z,x,y): <NEW_LINE> <INDENT> return 'Z%d/%d/%d' % (z,y,x)
MapNav (Global Mapper - compatible)
62599058cc0a2c111447c592
class TiffStack_pil(FramesSequence): <NEW_LINE> <INDENT> def __init__(self, fname, dtype=None): <NEW_LINE> <INDENT> self.im = Image.open(fname) <NEW_LINE> self.im.seek(0) <NEW_LINE> if dtype is None: <NEW_LINE> <INDENT> res = self.im.tag[0x102][0] <NEW_LINE> self._dtype = _dtype_map.get(res, np.int16) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._dtype = dtype <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> samples_per_pixel = self.im.tag[0x115][0] <NEW_LINE> if samples_per_pixel != 1: <NEW_LINE> <INDENT> raise ValueError("support for color not implemented") <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> self._im_sz = (self.im.tag[0x101][0], self.im.tag[0x100][0]) <NEW_LINE> self.cur = self.im.tell() <NEW_LINE> for j in itertools.count(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.im.seek(j) <NEW_LINE> <DEDENT> except EOFError: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> self._count = j <NEW_LINE> self.im.seek(0) <NEW_LINE> <DEDENT> def get_frame(self, j): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.im.seek(j) <NEW_LINE> <DEDENT> except EOFError: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> self.cur = self.im.tell() <NEW_LINE> res = np.reshape(self.im.getdata(), self._im_sz).astype(self._dtype).T[::-1] <NEW_LINE> return Frame(res, frame_no=j) <NEW_LINE> <DEDENT> @property <NEW_LINE> def pixel_type(self): <NEW_LINE> <INDENT> return self._dtype <NEW_LINE> <DEDENT> @property <NEW_LINE> def frame_shape(self): <NEW_LINE> <INDENT> return self._im_sz <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return self._count
Class for wrapping tiff stacks (that is, a single file with many frames) that depends on PIL/PILLOW.

Parameters
----------
fname : str
    Fully qualified file name
dtype : `None` or `numpy.dtype`
    If `None`, use the native type of the image, otherwise coerce into the specified dtype.
62599058dd821e528d6da453
class Bst: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.root = None <NEW_LINE> self.size = 0 <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return tree_string(self.root) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.__str__() <NEW_LINE> <DEDENT> def add(self, n): <NEW_LINE> <INDENT> self.root, added = bst_add(self.root, n) <NEW_LINE> if added: <NEW_LINE> <INDENT> self.size += 1 <NEW_LINE> <DEDENT> return added <NEW_LINE> <DEDENT> def find(self, n): <NEW_LINE> <INDENT> return bst_find(self.root, n) <NEW_LINE> <DEDENT> def remove(self, n): <NEW_LINE> <INDENT> self.root, removed = bst_remove(self.root, n) <NEW_LINE> if removed: <NEW_LINE> <INDENT> self.size -= 1 <NEW_LINE> <DEDENT> return removed <NEW_LINE> <DEDENT> def clear(self): <NEW_LINE> <INDENT> self.__init__()
A BST that does not contain duplicates.
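A minimal usage sketch for the Bst wrapper above; it assumes the helper functions bst_add, bst_remove, bst_find and tree_string referenced by the class are available in the same module and return (root, flag) pairs as the wrapper implies.

t = Bst()
t.add(5)
t.add(3)
print(t.add(5))    # False - duplicates are rejected, so size stays at 2
print(t.size)      # 2
t.remove(3)
print(t.size)      # 1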
6259905891f36d47f2231963
class Production(object): <NEW_LINE> <INDENT> def __init__(self, lhs, *rhs, annotations=list()): <NEW_LINE> <INDENT> self.lhs = lhs <NEW_LINE> self.rhs = rhs <NEW_LINE> self.annotations = annotations <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def with_annotations(cls, lhs, annotations, *rhs): <NEW_LINE> <INDENT> return cls(lhs, annotations=annotations, *rhs)
Represents a production in a grammar.
6259905823849d37ff85266c
class ComboboxFrame(Frame): <NEW_LINE> <INDENT> def __init__(self, master, label, vals, wdt, lbl_wdt=0): <NEW_LINE> <INDENT> Frame.__init__(self, master) <NEW_LINE> self.rowconfigure(0, minsize=5) <NEW_LINE> self.columnconfigure(0, minsize=5) <NEW_LINE> self.columnconfigure(1, minsize=lbl_wdt) <NEW_LINE> self.columnconfigure(2, minsize=wdt-lbl_wdt-10) <NEW_LINE> self.columnconfigure(3, minsize=5) <NEW_LINE> self.combox_input = StringVar() <NEW_LINE> Label( self, text=label). grid( row=1, column=1, sticky=W) <NEW_LINE> self.combox = Combobox( self, width=2, textvariable=self.combox_input, values=vals, state="readonly") <NEW_LINE> self.combox.grid( row=1, column=2, sticky=W+E)
GUI element containing a combobox and a label.
625990583cc13d1c6d466ce6
class Lap(): <NEW_LINE> <INDENT> def __init__(self, stats, lap_id, trackpoints=None): <NEW_LINE> <INDENT> self.lap_id = lap_id <NEW_LINE> self.stats = stats <NEW_LINE> self.trackpoints = [] if trackpoints is None else trackpoints <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> s = "Lap\n" <NEW_LINE> s += "Lap ID: {} \n".format(str(self.lap_id)) <NEW_LINE> for tp in self.trackpoints: <NEW_LINE> <INDENT> s += str(tp) <NEW_LINE> <DEDENT> return s <NEW_LINE> <DEDENT> def add_trackpoint(self, trackpoint): <NEW_LINE> <INDENT> self.trackpoints.append(trackpoint)
Represents a lap in a workout. Lap contains some data (averages and maximums) and a list of trackpoints.

Public attributes:
- lap_id: Lap id, must be unique only within a workout (integer, typically starts with 0 and increments).
- stats: Workout statistics, typically averages and maximums (dictionary; keys and value types may vary).
- trackpoints: Trackpoints (list of TrackPoint() objects).
6259905801c39578d7f1420a
class PsCmpScatter(PsCmpObservation): <NEW_LINE> <INDENT> lats = List.T(Float.T(), optional=True, default=[10.4, 10.5]) <NEW_LINE> lons = List.T(Float.T(), optional=True, default=[12.3, 13.4]) <NEW_LINE> def string_for_config(self): <NEW_LINE> <INDENT> srows = [] <NEW_LINE> for lat, lon in zip(self.lats, self.lons): <NEW_LINE> <INDENT> srows.append('(%15f, %15f)' % (lat, lon)) <NEW_LINE> <DEDENT> self.sw = 0 <NEW_LINE> return ' %i' % (len(srows)), '\n'.join(srows)
Scattered observation points.
62599058adb09d7d5dc0bb11
class GenericSerializer(APISerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = self.model <NEW_LINE> fields = "__all__"
Placeholder for the serializer we will create dynamically below.
625990587047854f46340966
class SubmissionFileHandler(FileHandler): <NEW_LINE> <INDENT> @require_permission(BaseHandler.AUTHENTICATED) <NEW_LINE> def get(self, file_id): <NEW_LINE> <INDENT> sub_file = self.safe_get_item(File, file_id) <NEW_LINE> submission = sub_file.submission <NEW_LINE> real_filename = sub_file.filename <NEW_LINE> if submission.language is not None: <NEW_LINE> <INDENT> real_filename = real_filename.replace("%l", submission.language) <NEW_LINE> <DEDENT> digest = sub_file.digest <NEW_LINE> self.sql_session.close() <NEW_LINE> self.fetch(digest, "text/plain", real_filename)
Shows a submission file.
62599058d486a94d0ba2d56f
class GroceryData(BaseData, Auxiliary): <NEW_LINE> <INDENT> def __init__( self, split_dataset, config=None, intersect=True, binarize=True, bin_thld=0.0, normalize=False, ): <NEW_LINE> <INDENT> BaseData.__init__( self, split_dataset=split_dataset, intersect=intersect, binarize=binarize, bin_thld=bin_thld, normalize=normalize, ) <NEW_LINE> self.config = config <NEW_LINE> Auxiliary.__init__( self, config=config, n_users=self.n_users, n_items=self.n_items ) <NEW_LINE> <DEDENT> def sample_triple_time(self, dump=True, load_save=False): <NEW_LINE> <INDENT> sample_file_name = ( "triple_" + self.config["dataset"]["dataset"] + ( ("_" + str(self.config["dataset"]["percent"] * 100)) if "percent" in self.config else "" ) + ( ("_" + str(self.config["model"]["time_step"])) if "time_step" in self.config else "_10" ) + "_" + str(self.config["model"]["n_sample"]) if "percent" in self.config else "" + ".csv" ) <NEW_LINE> self.process_path = self.config["system"]["process_dir"] <NEW_LINE> ensureDir(self.process_path) <NEW_LINE> sample_file = os.path.join(self.process_path, sample_file_name) <NEW_LINE> my_sampler = Sampler( self.train, sample_file, self.config["model"]["n_sample"], dump=dump, load_save=load_save, ) <NEW_LINE> return my_sampler.sample_by_time(self.config["model"]["time_step"]) <NEW_LINE> <DEDENT> def sample_triple(self, dump=True, load_save=False): <NEW_LINE> <INDENT> sample_file_name = ( "triple_" + self.config["dataset"]["dataset"] + ( ("_" + str(self.config["dataset"]["percent"] * 100)) if "percent" in self.config else "" ) + "_" + str(self.config["model"]["n_sample"]) if "percent" in self.config else "" + ".csv" ) <NEW_LINE> self.process_path = self.config["system"]["process_dir"] <NEW_LINE> ensureDir(self.process_path) <NEW_LINE> sample_file = os.path.join(self.process_path, sample_file_name) <NEW_LINE> my_sampler = Sampler( self.train, sample_file, self.config["model"]["n_sample"], dump=dump, load_save=load_save, ) <NEW_LINE> return my_sampler.sample()
A Grocery Data object, which consists of one more order/basket column than the BaseData. Re-indexes all the users and items from the raw dataset.

Args:
    split_dataset (train,valid,test): the split dataset, a tuple consisting of training (DataFrame), validate/list of validate (DataFrame), testing/list of testing (DataFrame).
    intersect (bool, optional): remove users and items of test/valid sets that do not exist in the train set. If the model is able to predict for new users and new items, this can be :obj:`False` (default: :obj:`True`).
    binarize (bool, optional): binarize the rating column of train set 0 or 1, i.e. implicit feedback. (default: :obj:`True`).
    bin_thld (int, optional): the threshold of binarization (default: :obj:`0`).
    normalize (bool, optional): normalize the rating column of train set into [0, 1], i.e. explicit feedback. (default: :obj:`False`).
6259905891af0d3eaad3b3cf
class GeneralTest(TestCase): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> call_command('create_fake_users', 10) <NEW_LINE> <DEDENT> def test_index(self): <NEW_LINE> <INDENT> client = Client() <NEW_LINE> response = client.get('/') <NEW_LINE> self.assertEqual(response.status_code, 302) <NEW_LINE> <DEDENT> def test_shorten(self): <NEW_LINE> <INDENT> TEST_URL = 'https://google.com/robots.txt' <NEW_LINE> client = Client() <NEW_LINE> response = client.post('/shorten', { 'url': TEST_URL }) <NEW_LINE> self.assertEqual(response.status_code, 200) <NEW_LINE> self.assertTrue(TEST_URL in response.content.decode('utf-8')) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def tearDownClass(cls): <NEW_LINE> <INDENT> pass
A really minimal set of smoketests to make sure things at least look OK on the surface.
625990587d847024c075d983
class DummySlideA(object): <NEW_LINE> <INDENT> def get_tags(self): <NEW_LINE> <INDENT> return {1, 3, 5, 7, 9}
Dummy Slide class for testing.
6259905863d6d428bbee3d5b
class baseCrud(metaclass=ABCMeta): <NEW_LINE> <INDENT> @abstractmethod <NEW_LINE> def add(self, obj): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def reload(self, obj): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def save(self, obj): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def delete(self, obj): <NEW_LINE> <INDENT> pass
Base class for CRUD operations and persistence
6259905899cbb53fe6832486
class MockWebDriver(IWebDriver): <NEW_LINE> <INDENT> attr_dict={} <NEW_LINE> js_dict={ "location.href":"http://www.test.com", "document.title":"testtitle", "document.cookie":{"Test":"test"}, "document.readyState":"complete", "close()":"closePage", } <NEW_LINE> rect=[0,1,2,3] <NEW_LINE> def __init__(self, webview): <NEW_LINE> <INDENT> self._webview = webview <NEW_LINE> <DEDENT> def set_attr_dict(self,dict): <NEW_LINE> <INDENT> self.attr_dict={} <NEW_LINE> self.attr_dict.update(dict) <NEW_LINE> <DEDENT> def set_dict(self, key, value): <NEW_LINE> <INDENT> self.js_dict[key]=value <NEW_LINE> <DEDENT> def get_attribute(self, elem_xpaths, attr_name): <NEW_LINE> <INDENT> return self.attr_dict[attr_name] <NEW_LINE> <DEDENT> def get_property(self, elem_xpaths, prop_name): <NEW_LINE> <INDENT> return self.attr_dict[prop_name] <NEW_LINE> <DEDENT> def get_style(self, elem_xpaths, style_name): <NEW_LINE> <INDENT> return self.attr_dict[style_name] <NEW_LINE> <DEDENT> def set_attribute(self,locator,name,value): <NEW_LINE> <INDENT> self.attr_dict[name] = value <NEW_LINE> <DEDENT> def set_property(self,locator,name,value): <NEW_LINE> <INDENT> self.attr_dict[name] = value <NEW_LINE> <DEDENT> def highlight(self, elem_xpaths): <NEW_LINE> <INDENT> return elem_xpaths <NEW_LINE> <DEDENT> def drag_element(self,x1,y1,x2,y2): <NEW_LINE> <INDENT> rect =[] <NEW_LINE> rect.append(x1) <NEW_LINE> rect.append(y1) <NEW_LINE> rect.append(x2) <NEW_LINE> rect.append(y2) <NEW_LINE> return rect <NEW_LINE> <DEDENT> def eval_script(self, frame_xpaths, script): <NEW_LINE> <INDENT> if script in self.js_dict.keys(): <NEW_LINE> <INDENT> return self.js_dict[script] <NEW_LINE> <DEDENT> <DEDENT> def get_element(self,locator): <NEW_LINE> <INDENT> return locator <NEW_LINE> <DEDENT> def set_rect(self,list): <NEW_LINE> <INDENT> self.rect=[] <NEW_LINE> self.rect.extend(list) <NEW_LINE> <DEDENT> def get_elem_rect(self,locator,recv=True): <NEW_LINE> <INDENT> return self.rect <NEW_LINE> <DEDENT> def scroll_to_visible(self, locators): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def read_console_log(self,timeout=10): <NEW_LINE> <INDENT> return "testlog" <NEW_LINE> <DEDENT> def get_element_count(self,locators): <NEW_LINE> <INDENT> return 3
Mock WebDriver
62599058f7d966606f74938c
class LedsService: <NEW_LINE> <INDENT> class State: <NEW_LINE> <INDENT> none, waking_up, standby, listening, loading, notify, error = range(7) <NEW_LINE> <DEDENT> def __init__(self, thread_handler): <NEW_LINE> <INDENT> self.thread_handler = thread_handler <NEW_LINE> led_device = USB.get_boards() <NEW_LINE> if led_device == USB.Device.respeaker: <NEW_LINE> <INDENT> self.animator = ReSpeakerAnimator() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.animator = None <NEW_LINE> <DEDENT> <DEDENT> def start_animation(self, animation_id): <NEW_LINE> <INDENT> if not self.animator: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> (animation, identifier) = self.get_animation(animation_id) <NEW_LINE> self.thread_handler.run(target=self.animator.run, args=(identifier, animation, )) <NEW_LINE> <DEDENT> def get_animation(self, animation): <NEW_LINE> <INDENT> identifier = str(random.randint(1, 100000)) <NEW_LINE> return (Animation(identifier, animation), identifier)
Leds service for handling visual feedback for various states of the system.
625990583eb6a72ae038bc07
class Schwefel20(Benchmark): <NEW_LINE> <INDENT> def __init__(self, dimensions=2): <NEW_LINE> <INDENT> Benchmark.__init__(self, dimensions) <NEW_LINE> self._bounds = list(zip([-100.0] * self.N, [100.0] * self.N)) <NEW_LINE> self.global_optimum = [[0.0 for _ in range(self.N)]] <NEW_LINE> self.fglob = 0.0 <NEW_LINE> self.change_dimensionality = True <NEW_LINE> <DEDENT> def fun(self, x, *args): <NEW_LINE> <INDENT> self.nfev += 1 <NEW_LINE> return sum(abs(x))
Schwefel 20 objective function.

This class defines the Schwefel 20 [1]_ global optimization problem. This is a unimodal minimization problem defined as follows:

.. math::

    f_{\text{Schwefel20}}(x) = \sum_{i=1}^n \lvert x_i \rvert

Here, :math:`n` represents the number of dimensions and :math:`x_i \in [-100, 100]` for :math:`i = 1, ..., n`.

*Global optimum*: :math:`f(x) = 0` for :math:`x_i = 0` for :math:`i = 1, ..., n`

.. [1] Jamil, M. & Yang, X.-S. A Literature Survey of Benchmark Functions For Global Optimization Problems. Int. Journal of Mathematical Modelling and Numerical Optimisation, 2013, 4, 150-194.

TODO: Jamil #122 is incorrect. There shouldn't be a leading minus sign.
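A short worked check of the formula above, assuming the Benchmark base class from the same module accepts the dimensions argument and initializes the nfev counter; the input vector is illustrative.

import numpy as np

b = Schwefel20(dimensions=2)
x = np.array([3.0, -4.0])
print(b.fun(x))   # |3| + |-4| = 7.0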
6259905856b00c62f0fb3e73
class CompetenceAPI(MethodView): <NEW_LINE> <INDENT> @admin_required <NEW_LINE> def patch(self, compt_id): <NEW_LINE> <INDENT> competence = Competence.query.get_or_404(compt_id) <NEW_LINE> in_schema = CompetenceSchema(exclude=('id',)) <NEW_LINE> try: <NEW_LINE> <INDENT> updated_competence = in_schema.load(request.json, instance=competence, partial=True) <NEW_LINE> <DEDENT> except ValidationError as err: <NEW_LINE> <INDENT> abort(400, {'message': err.messages}) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> db.session.add(updated_competence) <NEW_LINE> db.session.commit() <NEW_LINE> <DEDENT> except IntegrityError as err: <NEW_LINE> <INDENT> db.session.rollback() <NEW_LINE> log.exception(err) <NEW_LINE> abort(400, {'message': 'Data integrity violated.'}) <NEW_LINE> <DEDENT> out_schema = CompetenceSchema() <NEW_LINE> return out_schema.jsonify(updated_competence) <NEW_LINE> <DEDENT> @admin_required <NEW_LINE> def delete(self, compt_id): <NEW_LINE> <INDENT> competence = Competence.query.get_or_404(compt_id) <NEW_LINE> try: <NEW_LINE> <INDENT> db.session.delete(competence) <NEW_LINE> db.session.commit() <NEW_LINE> <DEDENT> except IntegrityError as err: <NEW_LINE> <INDENT> db.session.rollback() <NEW_LINE> log.exception(err) <NEW_LINE> abort(400, {'message': 'Data integrity violated.'}) <NEW_LINE> <DEDENT> return NO_PAYLOAD
REST views for a particular instance of a Competence model.
62599058cc0a2c111447c594
class Email(FolioApi): <NEW_LINE> <INDENT> def get_emails(self, **kwargs): <NEW_LINE> <INDENT> return self.call("GET", "/organizations-storage/emails", query=kwargs) <NEW_LINE> <DEDENT> def set_email(self, email: dict): <NEW_LINE> <INDENT> return self.call("POST", "/organizations-storage/emails", data=email) <NEW_LINE> <DEDENT> def get_email(self, emailsId: str): <NEW_LINE> <INDENT> return self.call("GET", f"/organizations-storage/emails/{emailsId}") <NEW_LINE> <DEDENT> def delete_email(self, emailsId: str): <NEW_LINE> <INDENT> return self.call("DELETE", f"/organizations-storage/emails/{emailsId}") <NEW_LINE> <DEDENT> def modify_email(self, emailsId: str, email: dict): <NEW_LINE> <INDENT> return self.call("PUT", f"/organizations-storage/emails/{emailsId}", data=email)
**Deprecated.** Emails CRUD APIs used to manage emails. **These APIs are not currently in use and may at some point be removed or resurrected**
625990587b25080760ed87b3
class Afirmation(Text): <NEW_LINE> <INDENT> def __init__(self, request, intent, is_found): <NEW_LINE> <INDENT> super().__init__(request, intent) <NEW_LINE> self.is_found = is_found <NEW_LINE> <DEDENT> @cs.inject() <NEW_LINE> async def rank(self, context) -> float: <NEW_LINE> <INDENT> if "frame_analyzer" not in context: <NEW_LINE> <INDENT> return .0 <NEW_LINE> <DEDENT> analyzer = FrameXAnalyzer(**context["frame_analyzer"]) <NEW_LINE> try: <NEW_LINE> <INDENT> user_reply = self.request.get_layer(lyr.RawText).text <NEW_LINE> <DEDENT> except (KeyError, ValueError, TypeError): <NEW_LINE> <INDENT> return .0 <NEW_LINE> <DEDENT> is_launched = user_reply == "Yes" <NEW_LINE> analyzer.get_next_frame(is_launched=is_launched) <NEW_LINE> context["frame_analyzer"] = analyzer.instance_data <NEW_LINE> found_condition = self.is_found == analyzer.frame_found <NEW_LINE> intent_condition = await super().rank() == 1. <NEW_LINE> return 1. if (found_condition and intent_condition) else .0
This trigger interprets the user reply as yes or no, but also detects whether the frame is found in order to pass to the final state.

:param request: Request provided by bernard
:type request: class `bernard.engine.request.Request`
:param user_reply: User reply indicating whether the rocket is launched or not
:type user_reply: str
:param is_found: True if the frame where the rocket is launched has been found
:type is_found: bool
625990582ae34c7f260ac68f
class Event(object): <NEW_LINE> <INDENT> def __init__(self, source, name, data=None): <NEW_LINE> <INDENT> self.source = source <NEW_LINE> self.name = name <NEW_LINE> self.data = data if data is not None else {}
Event being dispatched by the observable.

Parameters
----------
source : Observable
    The observable instance.
name : str
    The name of the event.
data : dict
    The information to be sent.
62599058097d151d1a2c2614
class BaseConfig: <NEW_LINE> <INDENT> DEBUG = False <NEW_LINE> TESTING = False <NEW_LINE> SQLALCHEMY_TRACK_MODIFICATIONS = False <NEW_LINE> UPLOAD_FOLDER = os.path.join( current_app.root_path, 'uploads' ) <NEW_LINE> ALLOWED_EXTENSIONS = {'txt', 'json'} <NEW_LINE> DEFAULT_LIST_INFO = {'shortname': '_feed', 'longname': 'Feed', 'description': 'Default Feed List'} <NEW_LINE> DEFAULT_HEADERS = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.13; rv:62.0) Gecko/20100101 Firefox/62.0'} <NEW_LINE> PER_PAGE = 24
Base configuration
625990584e4d5625663739af
class S(Fd): <NEW_LINE> <INDENT> def __init__(self, k, t, name=''): <NEW_LINE> <INDENT> if k < 0: <NEW_LINE> <INDENT> raise ValueError("invalid negative order k") <NEW_LINE> <DEDENT> self.k, self.t, self.name = k, t, name <NEW_LINE> if self.n <= 0: <NEW_LINE> <INDENT> raise ValueError("invalid knot sequence") <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def n(self): <NEW_LINE> <INDENT> return len(self.t) - self.k <NEW_LINE> <DEDENT> @property <NEW_LINE> def a(self): <NEW_LINE> <INDENT> return self.t[0] <NEW_LINE> <DEDENT> @property <NEW_LINE> def b(self): <NEW_LINE> <INDENT> return self.t[-1] <NEW_LINE> <DEDENT> @property <NEW_LINE> def tstar(self): <NEW_LINE> <INDENT> np1 = self.n + 1 <NEW_LINE> tstar = np.zeros((np1,), order='F') <NEW_LINE> for i in range(np1): <NEW_LINE> <INDENT> tstar[i] = np.sum(self.t[i+1:i+self.k]) <NEW_LINE> <DEDENT> tstar /= float(self.k - 1) <NEW_LINE> return tstar <NEW_LINE> <DEDENT> def tau4Ik(self, k_is_even=True): <NEW_LINE> <INDENT> k, n, t = self.k, self.n, self.t <NEW_LINE> if k % 2 != 0: <NEW_LINE> <INDENT> k_is_even = False <NEW_LINE> print("WARNING: k order is not even") <NEW_LINE> <DEDENT> tau, m = np.zeros((n,), order='F'), k // 2 <NEW_LINE> tau[0], tau[-1] = self.a, self.b <NEW_LINE> dt = (t[k] - t[k-1]) / float(m) <NEW_LINE> for i in range(m): <NEW_LINE> <INDENT> tau[i+1] = tau[i] + dt <NEW_LINE> <DEDENT> for i in range(n - k): <NEW_LINE> <INDENT> tau[m+i] = t[k+i] <NEW_LINE> <DEDENT> j = n - k + m <NEW_LINE> if not k_is_even: <NEW_LINE> <INDENT> m += 1 <NEW_LINE> dt = (t[n] - t[n-1]) / float(m) <NEW_LINE> <DEDENT> for i in range(m): <NEW_LINE> <INDENT> tau[j+i] = tau[j+i-1] + dt <NEW_LINE> <DEDENT> return tau <NEW_LINE> <DEDENT> def cmptau(self): <NEW_LINE> <INDENT> return self.tau4Ik() <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "S(k=%d,t=" % self.k + str(self.t) + ")" <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> l = "S_{ k=%d, t }, with\n t = [" % self.k <NEW_LINE> return l + ','.join(["%.3f" % v for v in self.t]) + ']'
Class representing the function space populated by Bsplines of polynomial order *k* and knot sequence :math:`\vec{t}`.

:Example:

>>> S0 = S(4,[1., 1., 2., 3., 4., 4.])
>>> print S0
S_{ k=4, t }, with
 t = [1.000,1.000,2.000,3.000,4.000,4.000]
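To make the tstar property (the Greville abscissae) above concrete, here is the hand computation for the knot sequence from the docstring example: with k = 4 and t = [1, 1, 2, 3, 4, 4], n = len(t) - k = 2, so tstar has n + 1 = 3 entries, each the average of k - 1 = 3 consecutive interior knots. (Python 3 print syntax is used here, unlike the docstring example.)

S0 = S(4, [1., 1., 2., 3., 4., 4.])
print(S0.tstar)
# [(1+2+3)/3, (2+3+4)/3, (3+4+4)/3] = approximately [2.0, 3.0, 3.667]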
62599058baa26c4b54d5084c
class subteamSelectWidget(forms.Select): <NEW_LINE> <INDENT> def __init__(self, attrs={'class': 'form-control'}, choices=()): <NEW_LINE> <INDENT> super(subteamSelectWidget, self).__init__(attrs, choices) <NEW_LINE> <DEDENT> def render_option(self, selected_choices, option_value, option_label): <NEW_LINE> <INDENT> result = super(subteamSelectWidget, self).render_option(selected_choices, option_value, option_label) <NEW_LINE> if option_value: <NEW_LINE> <INDENT> parentid = Team.objects.get(pk=option_value).parent.id <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> parentid = 'blank' <NEW_LINE> <DEDENT> open_tag_end = result.index('>') <NEW_LINE> result = result[:open_tag_end] + ' data-parent="{}"'.format(parentid) + result[open_tag_end:] <NEW_LINE> return result
Modification of Select widget: add the team's parent Employer via a data-parent attribute on rendering <option>
625990583539df3088ecd845
class OrganizationCreateEvent(TestCase, Utilities): <NEW_LINE> <INDENT> def test_create_event(self): <NEW_LINE> <INDENT> organization_dict = { "email": "[email protected]", "password": "testpassword123", "name": "testOrg1", "street_address": "1 IU st", "city": "Bloomington", "state": "Indiana", "phone_number": "765-426-3677", "organization_motto": "The motto" } <NEW_LINE> Utilities.organization_signup(self, organization_dict) <NEW_LINE> tokens = Utilities.organization_login(self, organization_dict) <NEW_LINE> self.create_event(tokens['access']) <NEW_LINE> <DEDENT> def create_event(self, access_token, expected=201): <NEW_LINE> <INDENT> newEvent = { "start_time": "2019-10-19 12:00:00-05:00", "end_time": "2019-10-20 13:00:00-05:00", "date": "2019-10-19", "title": "testActivity", "location": "SICE", "description": "testing endpoint" } <NEW_LINE> client = RequestsClient() <NEW_LINE> client.headers.update({'Authorization': 'Bearer ' + access_token}) <NEW_LINE> path = "http://testserver/api/organization/event/" <NEW_LINE> create_event_response = client.post(path, json=newEvent) <NEW_LINE> self.assertEqual(create_event_response.status_code, expected, msg=create_event_response.content)
Test the endpoint to create an event.
625990588e71fb1e983bd072
class RotatingFileHandler(BaseRotatingHandler): <NEW_LINE> <INDENT> def __init__(self, filename, mode='a', maxBytes=0, backupCount=0, encoding=None, delay=0): <NEW_LINE> <INDENT> if maxBytes > 0: <NEW_LINE> <INDENT> mode = 'a' <NEW_LINE> <DEDENT> BaseRotatingHandler.__init__(self, filename, mode, encoding, delay) <NEW_LINE> self.maxBytes = maxBytes <NEW_LINE> self.backupCount = backupCount <NEW_LINE> <DEDENT> def doRollover(self): <NEW_LINE> <INDENT> if self.stream: <NEW_LINE> <INDENT> self.stream.close() <NEW_LINE> self.stream = None <NEW_LINE> <DEDENT> if self.backupCount > 0: <NEW_LINE> <INDENT> for i in range(self.backupCount - 1, 0, -1): <NEW_LINE> <INDENT> sfn = "%s.%d" % (self.baseFilename, i) <NEW_LINE> dfn = "%s.%d" % (self.baseFilename, i + 1) <NEW_LINE> if os.path.exists(sfn): <NEW_LINE> <INDENT> if os.path.exists(dfn): <NEW_LINE> <INDENT> os.remove(dfn) <NEW_LINE> <DEDENT> os.rename(sfn, dfn) <NEW_LINE> <DEDENT> <DEDENT> dfn = self.baseFilename + ".1" <NEW_LINE> if os.path.exists(dfn): <NEW_LINE> <INDENT> os.remove(dfn) <NEW_LINE> <DEDENT> os.rename(self.baseFilename, dfn) <NEW_LINE> <DEDENT> self.stream = self._open() <NEW_LINE> <DEDENT> def shouldRollover(self, record): <NEW_LINE> <INDENT> if self.stream is None: <NEW_LINE> <INDENT> self.stream = self._open() <NEW_LINE> <DEDENT> if self.maxBytes > 0: <NEW_LINE> <INDENT> msg = "%s\n" % self.format(record) <NEW_LINE> self.stream.seek(0, 2) <NEW_LINE> if self.stream.tell() + len(msg) >= self.maxBytes: <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> <DEDENT> return 0
Handler for logging to a set of files, which switches from one file to the next when the current file reaches a certain size.
6259905801c39578d7f1420b
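The handler above mirrors the standard library's logging.handlers.RotatingFileHandler, so a minimal usage sketch can lean on the stdlib import; the file name "app.log" and the size/backup values below are illustrative only, not taken from the original source.

import logging
from logging.handlers import RotatingFileHandler  # essentially the same handler as shown above

# Roll over to app.log.1 ... app.log.3 once the current file would exceed 1 MiB.
logger = logging.getLogger("rollover-demo")
handler = RotatingFileHandler("app.log", maxBytes=1024 * 1024, backupCount=3)
logger.addHandler(handler)

for i in range(100000):
    logger.warning("message %d", i)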
class HuntingGrounds(ResourceField): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> ResourceField.__init__(self) <NEW_LINE> self.resource = Resource.food <NEW_LINE> self.name = "Hunting grounds (f)" <NEW_LINE> self.abreviation = 'f' <NEW_LINE> <DEDENT> def freeSlots(self): <NEW_LINE> <INDENT> return 10 <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> filled = [player.getOutputAbr() for player in self.persons for n in range(self.count(player))] <NEW_LINE> return self.name + ": " + " ".join(filled)
Class to represent a food Resource field on the board.
625990581b99ca400229000c
class XMLError(Exception): <NEW_LINE> <INDENT> def __init__(self, message): <NEW_LINE> <INDENT> self.message = message
Exception raised by XML validation.
625990587d847024c075d985
class RecentUnits(list): <NEW_LINE> <INDENT> def __init__(self, options): <NEW_LINE> <INDENT> list.__init__(self) <NEW_LINE> self.options = options <NEW_LINE> self.updateQuantity() <NEW_LINE> self.loadList() <NEW_LINE> <DEDENT> def updateQuantity(self): <NEW_LINE> <INDENT> self.numEntries = self.options.intData('RecentUnits', 0, 99) <NEW_LINE> del self[self.numEntries:] <NEW_LINE> <DEDENT> def loadList(self): <NEW_LINE> <INDENT> self[:] = [] <NEW_LINE> for num in range(self.numEntries): <NEW_LINE> <INDENT> name = self.options.strData(self.optionTitle(num), True) <NEW_LINE> if name: <NEW_LINE> <INDENT> self.append(name) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def writeList(self): <NEW_LINE> <INDENT> for num in range(self.numEntries): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> name = self[num] <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> name = '' <NEW_LINE> <DEDENT> self.options.changeData(self.optionTitle(num), name, True) <NEW_LINE> <DEDENT> self.options.writeChanges() <NEW_LINE> <DEDENT> def addEntry(self, name): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.remove(name) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> self.insert(0, name) <NEW_LINE> del self[self.numEntries:] <NEW_LINE> <DEDENT> def optionTitle(self, num): <NEW_LINE> <INDENT> return 'RecentUnit%d' % (num + 1)
A list of recent unit combo names
62599058097d151d1a2c2615
class CartoDBBase(object): <NEW_LINE> <INDENT> MAX_GET_QUERY_LEN = 2048 <NEW_LINE> def __init__(self, cartodb_domain, host='cartodb.com', protocol='https', api_version=None, proxy_info=None, sql_api_version='v2', import_api_version='v1'): <NEW_LINE> <INDENT> if api_version is None: <NEW_LINE> <INDENT> api_version = sql_api_version <NEW_LINE> <DEDENT> self.resource_url = RESOURCE_URL % {'user': cartodb_domain, 'domain': host, 'protocol': protocol, 'api_version': api_version} <NEW_LINE> self.imports_url = IMPORTS_URL % {'user': cartodb_domain, 'domain': host, 'protocol': protocol, 'api_version': import_api_version} <NEW_LINE> self.host = host <NEW_LINE> self.protocol = protocol <NEW_LINE> self.api_version = api_version <NEW_LINE> if type(proxy_info) == httplib2.ProxyInfo: <NEW_LINE> <INDENT> self.proxy_info = proxy_info <NEW_LINE> self.proxies = proxyinfo2proxies(self.proxy_info) <NEW_LINE> <DEDENT> if type(proxy_info) == dict: <NEW_LINE> <INDENT> self.proxies = proxy_info <NEW_LINE> self.proxy_info = proxies2proxyinfo(self.proxies) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.proxy_info = None <NEW_LINE> self.proxies = None <NEW_LINE> <DEDENT> <DEDENT> def req(self, url, http_method="GET", http_headers=None, body=None, params=None, files=None): <NEW_LINE> <INDENT> raise NotImplementedError('req method must be implemented') <NEW_LINE> <DEDENT> def get_response_data(self, resp, parse_json=True): <NEW_LINE> <INDENT> if resp.status_code == requests.codes.ok: <NEW_LINE> <INDENT> if parse_json: <NEW_LINE> <INDENT> return resp.json() <NEW_LINE> <DEDENT> return resp.content <NEW_LINE> <DEDENT> elif resp.status_code == requests.codes.bad_request: <NEW_LINE> <INDENT> raise CartoDBException(resp.json()['error']) <NEW_LINE> <DEDENT> elif resp.status_code == requests.codes.not_found: <NEW_LINE> <INDENT> raise CartoDBException('Not found: ' + resp.url) <NEW_LINE> <DEDENT> elif resp.status_code == requests.codes.internal_server_error: <NEW_LINE> <INDENT> raise CartoDBException('Internal server error') <NEW_LINE> <DEDENT> elif resp.status_code == requests.codes.unauthorized or resp.status_code == requests.codes.forbidden: <NEW_LINE> <INDENT> raise CartoDBException('Access denied') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise CartoDBException('Unknown error occurred') <NEW_LINE> <DEDENT> <DEDENT> def sql(self, sql, parse_json=True, do_post=True, format=None): <NEW_LINE> <INDENT> params = {'q': sql} <NEW_LINE> if format: <NEW_LINE> <INDENT> params['format'] = format <NEW_LINE> if format not in ['json', 'geojson']: <NEW_LINE> <INDENT> parse_json = False <NEW_LINE> <DEDENT> <DEDENT> url = self.resource_url <NEW_LINE> if len(sql) < self.MAX_GET_QUERY_LEN and not do_post: <NEW_LINE> <INDENT> resp = self.req(url, 'GET', params=params) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> resp = self.req(url, 'POST', body=params) <NEW_LINE> <DEDENT> return self.get_response_data(resp, parse_json)
Basic client to access the CartoDB API.
62599058d7e4931a7ef3d629
class IPanel(IColumn, IContained): <NEW_LINE> <INDENT> layout = Attribute("Assigned layout.")
A portlet panel. Register a portlet for this portlet manager type to enable them only for the panel (and not for the regular portlet column manager).
6259905899cbb53fe6832489
class PremierAddOn(Resource): <NEW_LINE> <INDENT> _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'location': {'required': True}, 'type': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'location': {'key': 'location', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'sku': {'key': 'properties.sku', 'type': 'str'}, 'product': {'key': 'properties.product', 'type': 'str'}, 'vendor': {'key': 'properties.vendor', 'type': 'str'}, 'marketplace_publisher': {'key': 'properties.marketplacePublisher', 'type': 'str'}, 'marketplace_offer': {'key': 'properties.marketplaceOffer', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, location: str, kind: Optional[str] = None, tags: Optional[Dict[str, str]] = None, sku: Optional[str] = None, product: Optional[str] = None, vendor: Optional[str] = None, marketplace_publisher: Optional[str] = None, marketplace_offer: Optional[str] = None, **kwargs ): <NEW_LINE> <INDENT> super(PremierAddOn, self).__init__(kind=kind, location=location, tags=tags, **kwargs) <NEW_LINE> self.sku = sku <NEW_LINE> self.product = product <NEW_LINE> self.vendor = vendor <NEW_LINE> self.marketplace_publisher = marketplace_publisher <NEW_LINE> self.marketplace_offer = marketplace_offer
Premier add-on. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :ivar id: Resource Id. :vartype id: str :ivar name: Resource Name. :vartype name: str :ivar kind: Kind of resource. :vartype kind: str :ivar location: Required. Resource Location. :vartype location: str :ivar type: Resource type. :vartype type: str :ivar tags: A set of tags. Resource tags. :vartype tags: dict[str, str] :ivar sku: Premier add on SKU. :vartype sku: str :ivar product: Premier add on Product. :vartype product: str :ivar vendor: Premier add on Vendor. :vartype vendor: str :ivar marketplace_publisher: Premier add on Marketplace publisher. :vartype marketplace_publisher: str :ivar marketplace_offer: Premier add on Marketplace offer. :vartype marketplace_offer: str
62599058cc0a2c111447c596
class Value(object): <NEW_LINE> <INDENT> __slots__ = ["value", "modifiers"] <NEW_LINE> def __init__(self, value, modifiers=()): <NEW_LINE> <INDENT> self.value = str(value) <NEW_LINE> if modifiers: <NEW_LINE> <INDENT> self.modifiers = tuple(modifiers) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.modifiers = None <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return str(self.value) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "{:s}({!r}, {!r})".format(self.__class__.__name__, self.value, self.modifiers)
Class representing a value and its modifiers in the OBO file This class has two member variables. `value` is the value itself, `modifiers` are the corresponding modifiers in a tuple. Currently the modifiers are not parsed in any way, but this might change in the future.
62599058ac7a0e7691f73a8b
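A minimal sketch of how the Value container above behaves; it assumes the Value class is in scope, and the tag value and modifier string are made up for illustration.

v = Value("part_of", modifiers=("xref=BFO:0000050",))
print(str(v))                    # part_of
print(repr(v))                   # Value('part_of', ('xref=BFO:0000050',))
print(Value("is_a").modifiers)   # None -- no modifiers were supplied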
class Book(models.Model): <NEW_LINE> <INDENT> title = models.CharField(max_length=200) <NEW_LINE> author = models.ForeignKey('Author', on_delete=models.SET_NULL, null=True) <NEW_LINE> summary = models.TextField(max_length=1000, help_text="Enter a brief description of the book") <NEW_LINE> isbn = models.CharField('ISBN', max_length=13, help_text='13 Character <a href="https://www.isbn-international.org/content/what-isbn">ISBN number</a>') <NEW_LINE> genre = models.ManyToManyField(Genre, help_text="Select a genre for this book") <NEW_LINE> language = models.ForeignKey('Language', on_delete=models.SET_NULL, null=True) <NEW_LINE> def display_genre(self): <NEW_LINE> <INDENT> return ', '.join([genre.name for genre in self.genre.all()[:3]]) <NEW_LINE> <DEDENT> display_genre.short_description = 'Genre' <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.title + ": " + str(self.author) <NEW_LINE> <DEDENT> def get_absolute_url(self): <NEW_LINE> <INDENT> return reverse('book-detail', args=[str(self.id)])
Model representing a book (but not a specific copy of a book).
625990588e71fb1e983bd074
class WebsitePinger: <NEW_LINE> <INDENT> _ERROR_RESULT = """Result: ({}, {} - {})""" <NEW_LINE> def ping_websites(self, websites): <NEW_LINE> <INDENT> errors = [] <NEW_LINE> for website in websites: <NEW_LINE> <INDENT> connection = httplib2.Http(".cache") <NEW_LINE> response, _ = connection.request(website, "GET") <NEW_LINE> print(response.status) <NEW_LINE> if response.status != 200 and response.status != 303: <NEW_LINE> <INDENT> errors.append(self._ERROR_RESULT.format(website, response.status, response.reason)) <NEW_LINE> <DEDENT> <DEDENT> return errors
Website Pinger
6259905823849d37ff852670
class Formatter(object): <NEW_LINE> <INDENT> def __call__(self, *args, **kwargs): <NEW_LINE> <INDENT> return self.format(*args, **kwargs) <NEW_LINE> <DEDENT> def format(self, *args, **kwargs): <NEW_LINE> <INDENT> raise NotImplementedError("Subclasses are expected to implement " "the format() method")
Empty class for the time being. Currently used only for finding all built-in subclasses
625990581b99ca400229000d
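Since Formatter only defines the calling convention, a subclass has to supply format(); the UpperFormatter below is a hypothetical example written for illustration, not part of the original code.

class UpperFormatter(Formatter):
    # Hypothetical subclass: upper-cases whatever it is given.
    def format(self, text):
        return str(text).upper()

fmt = UpperFormatter()
print(fmt("hello"))   # __call__ delegates to format(), so this prints HELLO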
class MSIXImageURI(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'uri': {'key': 'uri', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, uri: Optional[str] = None, **kwargs ): <NEW_LINE> <INDENT> super(MSIXImageURI, self).__init__(**kwargs) <NEW_LINE> self.uri = uri
Represents URI referring to MSIX Image. :param uri: URI to Image. :type uri: str
625990587047854f4634096a
class NATPMPError(Exception): <NEW_LINE> <INDENT> pass
Generic exception state. May be used to represent unknown errors.
62599058460517430c432b27
class FilePublisher(publisher.PublisherBase): <NEW_LINE> <INDENT> def __init__(self, parsed_url): <NEW_LINE> <INDENT> super(FilePublisher, self).__init__(parsed_url) <NEW_LINE> self.publisher_logger = None <NEW_LINE> path = parsed_url.path <NEW_LINE> if not path or path.lower() == 'file': <NEW_LINE> <INDENT> LOG.error('The path for the file publisher is required') <NEW_LINE> return <NEW_LINE> <DEDENT> rfh = None <NEW_LINE> max_bytes = 0 <NEW_LINE> backup_count = 0 <NEW_LINE> if parsed_url.query: <NEW_LINE> <INDENT> params = urlparse.parse_qs(parsed_url.query) <NEW_LINE> if params.get('max_bytes') and params.get('backup_count'): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> max_bytes = int(params.get('max_bytes')[0]) <NEW_LINE> backup_count = int(params.get('backup_count')[0]) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> LOG.error('max_bytes and backup_count should be ' 'numbers.') <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> rfh = logging.handlers.RotatingFileHandler( path, encoding='utf8', maxBytes=max_bytes, backupCount=backup_count) <NEW_LINE> self.publisher_logger = logging.Logger('publisher.file') <NEW_LINE> self.publisher_logger.propagate = False <NEW_LINE> self.publisher_logger.setLevel(logging.INFO) <NEW_LINE> rfh.setLevel(logging.INFO) <NEW_LINE> self.publisher_logger.addHandler(rfh) <NEW_LINE> <DEDENT> def publish_counters(self, context, counters, source): <NEW_LINE> <INDENT> if self.publisher_logger: <NEW_LINE> <INDENT> self.publisher_logger.info(counters)
Publishes metering data to a file. The publisher records metering data into a file. The file name and location should be configured in the ceilometer pipeline configuration file. If a file name and location are not specified, this File Publisher will not log any meters, other than logging a warning in the Ceilometer log file. To enable this publisher, add the following section to the file /etc/ceilometer/publisher.yaml or simply add it to an existing pipeline. - name: meter_file interval: 600 counters: - "*" transformers: publishers: - file:///var/test?max_bytes=10000000&backup_count=5 A file path is required for this publisher to work properly. If max_bytes or backup_count is missing, FileHandler will be used to save the metering data. If max_bytes and backup_count are present, RotatingFileHandler will be used to save the metering data.
625990582c8b7c6e89bd4d99
class Config_DjangoCMSExtended(Config_DjangoCMSCascade): <NEW_LINE> <INDENT> additional_apps = ( 'djangocms_snippet', 'cmsplugin_filer_file', 'cmsplugin_filer_link', 'cmsplugin_filer_folder', 'cmsplugin_filer_image', 'cmsplugin_filer_teaser', 'cmsplugin_filer_video', 'easy_thumbnails', 'filer', 's3_folder_storage', 'aldryn_background_image', ) <NEW_LINE> _caches_ = { 'default': { 'BACKEND': 'django.core.cache.backends.db.DatabaseCache', 'LOCATION': 'django_db_cache', } } <NEW_LINE> __migration_modules__ = { 'cmsplugin_filer_file': 'cmsplugin_filer_file.migrations_django', 'cmsplugin_filer_folder': 'cmsplugin_filer_folder.migrations_django', 'cmsplugin_filer_link': 'cmsplugin_filer_link.migrations_django', 'cmsplugin_filer_image': 'cmsplugin_filer_image.migrations_django', 'cmsplugin_filer_teaser': 'cmsplugin_filer_teaser.migrations_django', 'cmsplugin_filer_video': 'cmsplugin_filer_video.migrations_django', } <NEW_LINE> __patches__ = ( StackableSettings.patch_apps(additional_apps), StackableSettings.patch_dict('MIGRATION_MODULES', __migration_modules__), StackableSettings.patch_dict('CACHES', _caches_), ) <NEW_LINE> THUMBNAIL_PROCESSORS = ( 'easy_thumbnails.processors.colorspace', 'easy_thumbnails.processors.autocrop', 'filer.thumbnail_processors.scale_and_crop_with_subject_location', 'easy_thumbnails.processors.filters', ) <NEW_LINE> CMSPLUGIN_CASCADE_PLUGINS = ('cmsplugin_cascade.bootstrap3',) <NEW_LINE> DJANGOCMS_YOUTUBE_API_KEY = os.environ.get('DJANGOCMS_YOUTUBE_API_KEY', '')
more CMS plugins configuration for Django CMS 3.2.5 with Django 1.7
6259905830dc7b76659a0d55
class BasicFieldCell2(RNNCell): <NEW_LINE> <INDENT> def __init__(self, input_size, num_units, fields, hidden_size, n_inter=0, keep_prob=1., activation=tanh, state_is_tuple=True): <NEW_LINE> <INDENT> if not state_is_tuple: <NEW_LINE> <INDENT> logging.warn("%s: Using a concatenated state is slower and will soon be " "deprecated. Use state_is_tuple=True.", self) <NEW_LINE> <DEDENT> self._num_units = num_units <NEW_LINE> self._fields = fields <NEW_LINE> self._hidden_size = hidden_size <NEW_LINE> self._input_size = input_size <NEW_LINE> self._n_inter = n_inter <NEW_LINE> self._state_is_tuple = state_is_tuple <NEW_LINE> self._mask = tf.ones(1) <NEW_LINE> self._keep_prob = keep_prob <NEW_LINE> self._activation = activation <NEW_LINE> <DEDENT> @property <NEW_LINE> def state_size(self): <NEW_LINE> <INDENT> return (FieldStateTuple(self._input_size, self._hidden_size) if self._state_is_tuple else self._input_size + self._hidden_size) <NEW_LINE> <DEDENT> @property <NEW_LINE> def output_size(self): <NEW_LINE> <INDENT> return self._num_units <NEW_LINE> <DEDENT> def __call__(self, inputs, state, scope=None): <NEW_LINE> <INDENT> with vs.variable_scope(scope or "basic_field_cell2") as scope: <NEW_LINE> <INDENT> if self._state_is_tuple: <NEW_LINE> <INDENT> prev_inputs, h = state <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> prev_inputs = array_ops.slice(state, [0, 0], [-1, self._input_size]) <NEW_LINE> h = array_ops.slice(state, [0, self._input_size], [-1, self._hidden_size]) <NEW_LINE> <DEDENT> w = 1.0*0.5*np.cos(np.linspace(0,2*np.pi,num=self._n_inter+2,endpoint=True)) <NEW_LINE> w = w/np.sum(w) <NEW_LINE> trapezoid_fields = w[0]*self._fields(prev_inputs) <NEW_LINE> scope.reuse_variables() <NEW_LINE> for n in range(1,self._n_inter+2): <NEW_LINE> <INDENT> alpha = n/(self._n_inter+1) <NEW_LINE> inter = (1-alpha)*prev_inputs + alpha*inputs <NEW_LINE> trapezoid_fields += w[n]*self._fields(inter) <NEW_LINE> <DEDENT> d_inputs = (inputs - prev_inputs) <NEW_LINE> path_int = tf.reduce_sum(tf.mul(trapezoid_fields,d_inputs),reduction_indices=-1) <NEW_LINE> path_int = tf.transpose(path_int) <NEW_LINE> mask = tf.nn.dropout(self._mask,keep_prob=self._keep_prob)*self._keep_prob <NEW_LINE> new_h = mask*path_int + h <NEW_LINE> new_inputs = mask*inputs + (1-mask)*prev_inputs <NEW_LINE> <DEDENT> with vs.variable_scope('output_activation',reuse=None): <NEW_LINE> <INDENT> output = _linear(args=new_h,output_size=self._num_units,bias=True) <NEW_LINE> if self._activation is not None: <NEW_LINE> <INDENT> output = self._activation(output) <NEW_LINE> <DEDENT> if self._state_is_tuple: <NEW_LINE> <INDENT> new_state = FieldStateTuple(new_inputs, new_h) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> new_state = array_ops.concat(1, [new_inputs, new_h]) <NEW_LINE> <DEDENT> return output, new_state
Basic Field recurrent network cell. The implementation is based on: Ian Gemp
62599058a79ad1619776b593
class WebEngineSearch(browsertab.AbstractSearch): <NEW_LINE> <INDENT> def __init__(self, parent=None): <NEW_LINE> <INDENT> super().__init__(parent) <NEW_LINE> self._flags = QWebEnginePage.FindFlags(0) <NEW_LINE> <DEDENT> def _find(self, text, flags, cb=None): <NEW_LINE> <INDENT> if cb is None: <NEW_LINE> <INDENT> self._widget.findText(text, flags) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._widget.findText(text, flags, cb) <NEW_LINE> <DEDENT> <DEDENT> def search(self, text, *, ignore_case=False, reverse=False, result_cb=None): <NEW_LINE> <INDENT> flags = QWebEnginePage.FindFlags(0) <NEW_LINE> if ignore_case == 'smart': <NEW_LINE> <INDENT> if not text.islower(): <NEW_LINE> <INDENT> flags |= QWebEnginePage.FindCaseSensitively <NEW_LINE> <DEDENT> <DEDENT> elif not ignore_case: <NEW_LINE> <INDENT> flags |= QWebEnginePage.FindCaseSensitively <NEW_LINE> <DEDENT> if reverse: <NEW_LINE> <INDENT> flags |= QWebEnginePage.FindBackward <NEW_LINE> <DEDENT> self.text = text <NEW_LINE> self._flags = flags <NEW_LINE> self._find(text, flags, result_cb) <NEW_LINE> <DEDENT> def clear(self): <NEW_LINE> <INDENT> self._widget.findText('') <NEW_LINE> <DEDENT> def prev_result(self, *, result_cb=None): <NEW_LINE> <INDENT> flags = QWebEnginePage.FindFlags(int(self._flags)) <NEW_LINE> if flags & QWebEnginePage.FindBackward: <NEW_LINE> <INDENT> flags &= ~QWebEnginePage.FindBackward <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> flags |= QWebEnginePage.FindBackward <NEW_LINE> <DEDENT> self._find(self.text, flags, result_cb) <NEW_LINE> <DEDENT> def next_result(self, *, result_cb=None): <NEW_LINE> <INDENT> self._find(self.text, self._flags, result_cb)
QtWebEngine implementations related to searching on the page.
6259905882261d6c527309a0
class BottleneckBlockV1(Module): <NEW_LINE> <INDENT> def __init__(self, n_in_channels, n_out_channels): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.conv1 = Conv2d( in_channels=n_in_channels, out_channels=n_out_channels, kernel_size=(1, 1), stride=(1, 1), bias=False ) <NEW_LINE> self.bn1 = BatchNorm2d(num_features=n_out_channels) <NEW_LINE> self.conv2 = Conv2d( in_channels=n_out_channels, out_channels=n_out_channels, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=False ) <NEW_LINE> self.bn2 = BatchNorm2d(num_features=n_out_channels) <NEW_LINE> self.conv3 = Conv2d( in_channels=n_out_channels, out_channels=(n_out_channels * 4), kernel_size=(1, 1), bias=False ) <NEW_LINE> self.bn3 = BatchNorm2d(num_features=(n_out_channels * 4)) <NEW_LINE> self.relu = ReLU() <NEW_LINE> <DEDENT> def forward(self, inputs): <NEW_LINE> <INDENT> layer = self.conv1(inputs) <NEW_LINE> layer = self.bn1(layer) <NEW_LINE> layer = self.relu(layer) <NEW_LINE> layer = self.conv2(layer) <NEW_LINE> layer = self.bn2(layer) <NEW_LINE> layer = self.relu(layer) <NEW_LINE> layer = self.conv3(layer) <NEW_LINE> layer = self.bn3(layer) <NEW_LINE> layer = torch.add(layer, inputs) <NEW_LINE> outputs = self.relu(layer) <NEW_LINE> return outputs
Bottleneck Residual Block, as implemented in 'Deep Residual Learning for Image Recognition', the original ResNet paper (https://arxiv.org/abs/1512.03385).
6259905863d6d428bbee3d5d
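Because the residual addition torch.add(layer, inputs) uses the input tensor directly (there is no projection shortcut), the block only works when n_in_channels equals 4 * n_out_channels. A small shape sketch, assuming the class above is importable; the tensor sizes are arbitrary.

import torch

block = BottleneckBlockV1(n_in_channels=256, n_out_channels=64)
x = torch.randn(1, 256, 56, 56)   # (batch, channels, height, width)
y = block(x)
print(y.shape)                    # torch.Size([1, 256, 56, 56]) -- channels and spatial size preserved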
class CustomEncoder(AbstractTransformer): <NEW_LINE> <INDENT> def __init__(self, encode='onehot', **other): <NEW_LINE> <INDENT> super().__init__(**other) <NEW_LINE> self.encode = encode <NEW_LINE> <DEDENT> def _create_encoder(self): <NEW_LINE> <INDENT> import category_encoders as ce <NEW_LINE> _dict = { "onehot": ce.OneHotEncoder, "binary": ce.BinaryEncoder, "backward": ce.BackwardDifferenceEncoder, "ordinal": ce.OrdinalEncoder, "sum": ce.SumEncoder, "poly": ce.PolynomialEncoder, "helmert": ce.HelmertEncoder, "hash": ce.HashingEncoder, } <NEW_LINE> return _dict[self.encode] <NEW_LINE> <DEDENT> def transform(self, X, **transform_params): <NEW_LINE> <INDENT> to_transform = self._columns_to_apply(X) <NEW_LINE> encoder_type = self._create_encoder() <NEW_LINE> encoder = encoder_type(cols=to_transform) <NEW_LINE> return encoder.fit_transform(X)
encode: "onehot", "binary", "backward", "ordinal", "sum", "poly", "helmert", "hash"
6259905816aa5153ce401a8f
class TranspositionPipeline(pipeline.Pipeline): <NEW_LINE> <INDENT> def __init__(self, transposition_range, name=None): <NEW_LINE> <INDENT> super(TranspositionPipeline, self).__init__( input_type=music_pb2.NoteSequence, output_type=music_pb2.NoteSequence, name=name) <NEW_LINE> self._transposition_range = transposition_range <NEW_LINE> <DEDENT> def transform(self, sequence): <NEW_LINE> <INDENT> stats = dict([(state_name, statistics.Counter(state_name)) for state_name in ['skipped_due_to_range_exceeded', 'transpositions_generated']]) <NEW_LINE> transposed = [] <NEW_LINE> for amount in self._transposition_range: <NEW_LINE> <INDENT> if amount == 0: <NEW_LINE> <INDENT> transposed.append(sequence) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ts = self._transpose(sequence, amount, stats) <NEW_LINE> if ts is not None: <NEW_LINE> <INDENT> transposed.append(ts) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> stats['transpositions_generated'].increment(len(transposed)) <NEW_LINE> self._set_stats(stats.values()) <NEW_LINE> return transposed <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _transpose(ns, amount, stats): <NEW_LINE> <INDENT> ts = copy.deepcopy(ns) <NEW_LINE> for note in ts.notes: <NEW_LINE> <INDENT> note.pitch += amount <NEW_LINE> if (note.pitch < constants.MIN_MIDI_PITCH or note.pitch > constants.MAX_MIDI_PITCH): <NEW_LINE> <INDENT> stats['skipped_due_to_range_exceeded'].increment() <NEW_LINE> return None <NEW_LINE> <DEDENT> <DEDENT> return ts
Creates transposed versions of the input NoteSequence.
62599058d7e4931a7ef3d62b
class BasicMatchString(object): <NEW_LINE> <INDENT> def __init__(self, rule: Rule, extra: dict, match: dict): <NEW_LINE> <INDENT> self.rule = rule <NEW_LINE> self.extra = extra <NEW_LINE> self.match = match <NEW_LINE> <DEDENT> def _add_custom_alert_text(self): <NEW_LINE> <INDENT> missing = self.rule.conf('alert_missing_value', '<MISSING VALUE>') <NEW_LINE> alert_text = str(self.rule.conf('alert_text', '')) <NEW_LINE> if self.rule.conf('alert_text_args'): <NEW_LINE> <INDENT> alert_text_args = self.rule.conf('alert_text_args') <NEW_LINE> alert_text_values = [dots_get(self.match, arg) for arg in alert_text_args] <NEW_LINE> for i, text_value in enumerate(alert_text_values): <NEW_LINE> <INDENT> if text_value is None: <NEW_LINE> <INDENT> alert_value = self.rule.conf(alert_text_args[i]) <NEW_LINE> if alert_value: <NEW_LINE> <INDENT> alert_text_values[i] = alert_value <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> alert_text_values = [missing if val is None else val for val in alert_text_values] <NEW_LINE> alert_text = alert_text.format(*alert_text_values) <NEW_LINE> <DEDENT> elif self.rule.conf('alert_text_kw'): <NEW_LINE> <INDENT> kw = {} <NEW_LINE> for name, kw_name in self.rule.conf('alert_text_kw').items(): <NEW_LINE> <INDENT> val = dots_get(self.match, name) <NEW_LINE> if val is None: <NEW_LINE> <INDENT> val = self.rule.conf(name) <NEW_LINE> <DEDENT> kw[kw_name] = missing if val is None else val <NEW_LINE> <DEDENT> alert_text = alert_text.format(**kw) <NEW_LINE> <DEDENT> self.text += alert_text <NEW_LINE> <DEDENT> def _add_rule_text(self): <NEW_LINE> <INDENT> self.text += self.rule.get_match_str(self.extra, self.match) <NEW_LINE> <DEDENT> def _add_top_counts(self): <NEW_LINE> <INDENT> for key, counts in self.match.items(): <NEW_LINE> <INDENT> if key.startswith('top_events_'): <NEW_LINE> <INDENT> self.text += '%s:\n' % (key[11:]) <NEW_LINE> top_events = counts.items() <NEW_LINE> if not top_events: <NEW_LINE> <INDENT> self.text += 'No events found.\n' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> top_events.sort(key=lambda x: x[1], reverse=True) <NEW_LINE> for term, count in top_events: <NEW_LINE> <INDENT> self.text += '%s: %s\n' % (term, count) <NEW_LINE> <DEDENT> <DEDENT> self.text += '\n' <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> self.text = '' <NEW_LINE> if not self.rule.conf('alert_text'): <NEW_LINE> <INDENT> self.text += self.rule.name + '\n\n' <NEW_LINE> <DEDENT> self._add_custom_alert_text() <NEW_LINE> self.text = _ensure_new_line(self.text) <NEW_LINE> if self.rule.conf('alert_text_type') != 'alert_text_only': <NEW_LINE> <INDENT> self._add_rule_text() <NEW_LINE> self.text = _ensure_new_line(self.text) <NEW_LINE> if self.rule.conf('top_count_keys'): <NEW_LINE> <INDENT> self._add_top_counts() <NEW_LINE> <DEDENT> if self.rule.conf('alert_text_type') != 'exclude_fields': <NEW_LINE> <INDENT> self.text += _format_as_basic_string(self.match) <NEW_LINE> <DEDENT> <DEDENT> return self.text
Creates a string containing fields in match for the given rule.
62599058e64d504609df9ea5
class StructureStructure(orm.Model): <NEW_LINE> <INDENT> _inherit = 'structure.structure' <NEW_LINE> _columns = { 'block_ids': fields.one2many( 'structure.block', 'structure_id', 'Block'), }
Model name: StructureStructure, inherited to add a one2many relation field.
6259905873bcbd0ca4bcb83f
class StandardGCMCSystemSampler(GCMCSystemSampler): <NEW_LINE> <INDENT> def __init__(self, system, topology, temperature, adams=None, excessChemicalPotential=-6.09*unit.kilocalories_per_mole, standardVolume=30.345*unit.angstroms**3, adamsShift=0.0, boxVectors=None, ghostFile="gcmc-ghost-wats.txt", log='gcmc.log', dcd=None, rst=None, overwrite=False): <NEW_LINE> <INDENT> GCMCSystemSampler.__init__(self, system, topology, temperature, adams=adams, excessChemicalPotential=excessChemicalPotential, standardVolume=standardVolume, adamsShift=adamsShift, boxVectors=boxVectors, ghostFile=ghostFile, log=log, dcd=dcd, rst=rst, overwrite=overwrite) <NEW_LINE> self.energy = None <NEW_LINE> self.logger.info("StandardGCMCSystemSampler object initialised") <NEW_LINE> <DEDENT> def move(self, context, n=1): <NEW_LINE> <INDENT> self.context = context <NEW_LINE> state = self.context.getState(getPositions=True, enforcePeriodicBox=True, getEnergy=True) <NEW_LINE> self.positions = deepcopy(state.getPositions(asNumpy=True)) <NEW_LINE> self.energy = state.getPotentialEnergy() <NEW_LINE> for i in range(n): <NEW_LINE> <INDENT> if np.random.randint(2) == 1: <NEW_LINE> <INDENT> self.insertionMove() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.deletionMove() <NEW_LINE> <DEDENT> self.n_moves += 1 <NEW_LINE> self.Ns.append(self.N) <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def insertionMove(self): <NEW_LINE> <INDENT> new_positions, resid, atom_indices = self.insertRandomWater() <NEW_LINE> self.adjustSpecificWater(atom_indices, 1.0) <NEW_LINE> self.context.setPositions(new_positions) <NEW_LINE> final_energy = self.context.getState(getEnergy=True).getPotentialEnergy() <NEW_LINE> acc_prob = math.exp(self.B) * math.exp(-(final_energy - self.energy) / self.kT) / (self.N + 1) <NEW_LINE> self.acceptance_probabilities.append(acc_prob) <NEW_LINE> if acc_prob < np.random.rand() or np.isnan(acc_prob): <NEW_LINE> <INDENT> self.adjustSpecificWater(atom_indices, 0.0) <NEW_LINE> self.context.setPositions(self.positions) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.positions = deepcopy(new_positions) <NEW_LINE> self.setWaterStatus(resid, 1) <NEW_LINE> self.N += 1 <NEW_LINE> self.n_accepted += 1 <NEW_LINE> self.energy = final_energy <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def deletionMove(self): <NEW_LINE> <INDENT> resid, atom_indices = self.deleteRandomWater() <NEW_LINE> if resid is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> self.adjustSpecificWater(atom_indices, 0.0) <NEW_LINE> final_energy = self.context.getState(getEnergy=True).getPotentialEnergy() <NEW_LINE> acc_prob = self.N * math.exp(-self.B) * math.exp(-(final_energy - self.energy) / self.kT) <NEW_LINE> self.acceptance_probabilities.append(acc_prob) <NEW_LINE> if acc_prob < np.random.rand() or np.isnan(acc_prob): <NEW_LINE> <INDENT> self.adjustSpecificWater(atom_indices, 1.0) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.setWaterStatus(resid, 0) <NEW_LINE> self.N -= 1 <NEW_LINE> self.n_accepted += 1 <NEW_LINE> self.energy = final_energy <NEW_LINE> <DEDENT> return None
Class to carry out instantaneous GCMC moves in OpenMM
6259905856ac1b37e63037bc
class Hypernode(object): <NEW_LINE> <INDENT> def __init__(self, label): <NEW_LINE> <INDENT> self.label = label <NEW_LINE> self.witness = {} <NEW_LINE> self.outs = [] <NEW_LINE> self.ins = [] <NEW_LINE> <DEDENT> def update_witness(self, nt): <NEW_LINE> <INDENT> if nt in self.witness: <NEW_LINE> <INDENT> del self.witness[nt] <NEW_LINE> <DEDENT> for edge in self.ins: <NEW_LINE> <INDENT> if edge.get_nonterminal() != nt: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> temp_prod = edge.get_pruningbit() * edge.get_probability() <NEW_LINE> if not edge.get_successors(): <NEW_LINE> <INDENT> if self.get_witness(nt)[1] < temp_prod: <NEW_LINE> <INDENT> self.witness[nt] = (edge, temp_prod) <NEW_LINE> <DEDENT> continue <NEW_LINE> <DEDENT> for node, n in edge.get_successors(): <NEW_LINE> <INDENT> sub_prod = node.get_witness(n)[1] * temp_prod <NEW_LINE> if self.get_witness(nt)[1] < sub_prod: <NEW_LINE> <INDENT> self.witness[nt] = (edge, sub_prod) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def is_leaf(self): <NEW_LINE> <INDENT> if not self.ins or len(self.ins) == 0: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif not [hn for he in self.ins for hn, _nt in he.get_successors()]: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def is_root(self): <NEW_LINE> <INDENT> if not self.outs: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def get_witness(self, nt): <NEW_LINE> <INDENT> return self.witness.get(nt, (None, 0)) <NEW_LINE> <DEDENT> def get_witnesses(self): <NEW_LINE> <INDENT> return self.witness <NEW_LINE> <DEDENT> def get_ins(self): <NEW_LINE> <INDENT> return self.ins <NEW_LINE> <DEDENT> def get_outs(self): <NEW_LINE> <INDENT> return self.outs <NEW_LINE> <DEDENT> def add_in(self, i): <NEW_LINE> <INDENT> self.ins.append(i) <NEW_LINE> self.update_witness(i.get_nonterminal()) <NEW_LINE> <DEDENT> def add_out(self, o): <NEW_LINE> <INDENT> self.outs.append(o) <NEW_LINE> <DEDENT> def get_label(self): <NEW_LINE> <INDENT> return self.label <NEW_LINE> <DEDENT> def get_subtree(self, nt): <NEW_LINE> <INDENT> edge = self.get_witness(nt)[0] <NEW_LINE> if edge is None: <NEW_LINE> <INDENT> raise ValueError("There is no witness for %s" % nt) <NEW_LINE> <DEDENT> if not edge.get_successors(): <NEW_LINE> <INDENT> stdout.flush() <NEW_LINE> return Tree(edge.get_nonterminal(), [self.get_label()[0]]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> s = edge.get_successors() <NEW_LINE> return Tree(edge.get_nonterminal(), [t.get_subtree(n) for t, n in s])
The hypernode class representing an item in the hypergraph. Attributes ---------- label : tuple(int) The cover of the hypernode as the label. witness : dict(str, (Hyperedge, double)) The witnesses (ingoing hyperedges) for each nonterminal with their probability. outs : list(Hyperedge) The list of outgoing hyperedges. ins : list(Hyperedge) The list of ingoing hyperedges.
62599058b7558d5895464a01
class getRasAttributes(Handler): <NEW_LINE> <INDENT> def control(self): <NEW_LINE> <INDENT> self.is_valid(self.ras_ip, str) <NEW_LINE> self.is_valid_content(self.ras_ip, self.IP_PATTERN) <NEW_LINE> <DEDENT> def setup(self, ras_ip): <NEW_LINE> <INDENT> self.ras_ip = ras_ip
Get RAS attributes method class.
62599058baa26c4b54d50850
@STRICT_MATCH(generic_ids="channel_0x042e") <NEW_LINE> @STRICT_MATCH(channel_names="voc_level") <NEW_LINE> class VOCLevel(Sensor): <NEW_LINE> <INDENT> SENSOR_ATTR = "measured_value" <NEW_LINE> _decimals = 0 <NEW_LINE> _multiplier = 1e6 <NEW_LINE> _unit = CONCENTRATION_MICROGRAMS_PER_CUBIC_METER
VOC Level sensor.
6259905891f36d47f2231966
class ScoreMatrixTest(unittest.TestCase): <NEW_LINE> <INDENT> def test_matrix(self): <NEW_LINE> <INDENT> known_matrix = [[0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 2, 1, 2, 1, 2, 1, 0, 2], [0, 1, 1, 1, 1, 1, 1, 0, 1], [0, 0, 3, 2, 3, 2, 3, 2, 1], [0, 2, 2, 5, 4, 5, 4, 3, 4], [0, 1, 4, 4, 7, 6, 7, 6, 5], [0, 2, 3, 6, 6, 9, 8, 7, 8], [0, 1, 4, 5, 8, 8, 11, 10, 9], [0, 2, 3, 6, 7, 10, 10, 10, 12]] <NEW_LINE> global seq1, seq2 <NEW_LINE> seq1 = 'AGCACACA' <NEW_LINE> seq2 = 'ACACACTA' <NEW_LINE> rows = len(seq1) + 1 <NEW_LINE> cols = len(seq2) + 1 <NEW_LINE> matrix_to_test, max_pos = create_score_matrix(rows, cols) <NEW_LINE> self.assertEqual(known_matrix, matrix_to_test)
Compare the matrix produced by create_score_matrix() with a known matrix.
62599058d6c5a102081e36cd
class Rectangle: <NEW_LINE> <INDENT> number_of_instances = 0 <NEW_LINE> print_symbol = '#' <NEW_LINE> def __init__(self, width=0, height=0): <NEW_LINE> <INDENT> self.width = width <NEW_LINE> self.height = height <NEW_LINE> Rectangle.number_of_instances += 1 <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> print('Bye rectangle...') <NEW_LINE> Rectangle.number_of_instances -= 1 <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> string = '' <NEW_LINE> if self.__width == 0 or self.__height == 0: <NEW_LINE> <INDENT> return string <NEW_LINE> <DEDENT> string += ((str(self.print_symbol) * self.__width + '\n') * self.__height) <NEW_LINE> return string[:-1] <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return ('Rectangle(' + str(self.__width) + ', ' + str(self.__height) + ')') <NEW_LINE> <DEDENT> @property <NEW_LINE> def width(self): <NEW_LINE> <INDENT> return self.__width <NEW_LINE> <DEDENT> @width.setter <NEW_LINE> def width(self, value): <NEW_LINE> <INDENT> if type(value) != int: <NEW_LINE> <INDENT> raise TypeError('width must be an integer') <NEW_LINE> <DEDENT> if value < 0: <NEW_LINE> <INDENT> raise ValueError('width must be >= 0') <NEW_LINE> <DEDENT> self.__width = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def height(self): <NEW_LINE> <INDENT> return self.__height <NEW_LINE> <DEDENT> @height.setter <NEW_LINE> def height(self, value): <NEW_LINE> <INDENT> if type(value) != int: <NEW_LINE> <INDENT> raise TypeError('height must be an integer') <NEW_LINE> <DEDENT> if value < 0: <NEW_LINE> <INDENT> raise ValueError('height must be >= 0') <NEW_LINE> <DEDENT> self.__height = value <NEW_LINE> <DEDENT> def area(self): <NEW_LINE> <INDENT> return self.__width * self.__height <NEW_LINE> <DEDENT> def perimeter(self): <NEW_LINE> <INDENT> if self.__width == 0 or self.__height == 0: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> return (self.__width + self.__height) * 2 <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def bigger_or_equal(rect_1, rect_2): <NEW_LINE> <INDENT> if type(rect_1) != Rectangle: <NEW_LINE> <INDENT> raise TypeError('rect_1 must be an instance of Rectangle') <NEW_LINE> <DEDENT> if type(rect_2) != Rectangle: <NEW_LINE> <INDENT> raise TypeError('rect_2 must be an instance of Rectangle') <NEW_LINE> <DEDENT> if rect_1.area() >= rect_2.area(): <NEW_LINE> <INDENT> return rect_1 <NEW_LINE> <DEDENT> return rect_2
Represents a rectangle.
6259905832920d7e50bc75f3
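A short usage sketch exercising the Rectangle class above; the widths and heights are arbitrary values chosen for illustration.

r1 = Rectangle(3, 2)
r2 = Rectangle(4, 5)
print(r1.area(), r1.perimeter())            # 6 10
print(Rectangle.bigger_or_equal(r1, r2))    # draws r2 (the larger rectangle) with '#'
print(Rectangle.number_of_instances)        # 2 while both rectangles are alive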
class VerifyUserToken(graphene.Mutation): <NEW_LINE> <INDENT> class Arguments: <NEW_LINE> <INDENT> userToken = graphene.String() <NEW_LINE> <DEDENT> User = graphene.Field(User) <NEW_LINE> token = graphene.String() <NEW_LINE> ok = graphene.Boolean() <NEW_LINE> def mutate(self, info, userToken): <NEW_LINE> <INDENT> query = User.get_query(info) <NEW_LINE> if not userToken or not UserModel.verify_auth_token(userToken): <NEW_LINE> <INDENT> raise Exception('Your session has expired!') <NEW_LINE> <DEDENT> id = UserModel.verify_auth_token(userToken) <NEW_LINE> user = query.filter(UserModel.id == id).first() <NEW_LINE> ok = True <NEW_LINE> return VerifyUserToken(User=user, token=userToken, ok=ok)
Verify user token
625990587047854f4634096c
class JSONWriter(TrainingMonitor): <NEW_LINE> <INDENT> FILENAME = 'stat.json' <NEW_LINE> def __new__(cls): <NEW_LINE> <INDENT> if logger.LOG_DIR: <NEW_LINE> <INDENT> return super(JSONWriter, cls).__new__(cls) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger.warn("logger directory was not set. Ignore JSONWriter.") <NEW_LINE> return NoOpMonitor() <NEW_LINE> <DEDENT> <DEDENT> def _setup_graph(self): <NEW_LINE> <INDENT> self._dir = logger.LOG_DIR <NEW_LINE> self._fname = os.path.join(self._dir, self.FILENAME) <NEW_LINE> if os.path.isfile(self._fname): <NEW_LINE> <INDENT> logger.info("Found existing JSON at {}, will append to it.".format(self._fname)) <NEW_LINE> with open(self._fname) as f: <NEW_LINE> <INDENT> self._stats = json.load(f) <NEW_LINE> assert isinstance(self._stats, list), type(self._stats) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> epoch = self._stats[-1]['epoch_num'] + 1 <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger.info("Found training history from JSON, now starting from epoch number {}.".format(epoch)) <NEW_LINE> self.trainer.config.starting_epoch = epoch <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self._stats = [] <NEW_LINE> <DEDENT> self._stat_now = {} <NEW_LINE> self._last_gs = -1 <NEW_LINE> self._total = self.trainer.config.steps_per_epoch <NEW_LINE> <DEDENT> def _trigger_step(self): <NEW_LINE> <INDENT> if self.local_step != self._total - 1: <NEW_LINE> <INDENT> self._push() <NEW_LINE> <DEDENT> <DEDENT> def _trigger_epoch(self): <NEW_LINE> <INDENT> self._push() <NEW_LINE> <DEDENT> def process_scalar(self, name, val): <NEW_LINE> <INDENT> self._stat_now[name] = val <NEW_LINE> <DEDENT> def _push(self): <NEW_LINE> <INDENT> if len(self._stat_now): <NEW_LINE> <INDENT> self._stat_now['epoch_num'] = self.epoch_num <NEW_LINE> self._stat_now['global_step'] = self.global_step <NEW_LINE> self._stats.append(self._stat_now) <NEW_LINE> self._stat_now = {} <NEW_LINE> self._write_stat() <NEW_LINE> <DEDENT> <DEDENT> def _write_stat(self): <NEW_LINE> <INDENT> tmp_filename = self._fname + '.tmp' <NEW_LINE> try: <NEW_LINE> <INDENT> with open(tmp_filename, 'w') as f: <NEW_LINE> <INDENT> json.dump(self._stats, f) <NEW_LINE> <DEDENT> shutil.move(tmp_filename, self._fname) <NEW_LINE> <DEDENT> except IOError: <NEW_LINE> <INDENT> logger.exception("Exception in JSONWriter._write_stat()!")
Write all scalar data to a JSON file under ``logger.LOG_DIR``, grouped by their global step. This monitor also attempts to recover the epoch number during setup, if an existing JSON file is found at the same place.
62599058be8e80087fbc0630
class BeerListDetailedView(generics.RetrieveUpdateDestroyAPIView): <NEW_LINE> <INDENT> queryset = BeerList.objects.all() <NEW_LINE> serializer_class = BeerListSerializer
RetrieveUpdateDestroyAPIView handles HTTP GET, PUT and DELETE requests.
62599058baa26c4b54d50851
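A hypothetical urls.py entry showing how such a DRF view is typically wired up; the route, the URL name and the assumption that BeerListDetailedView is imported in that module are illustrative, not taken from the original project.

from django.urls import path

urlpatterns = [
    # GET retrieves, PUT updates and DELETE removes the BeerList selected by pk.
    path('beerlists/<int:pk>/', BeerListDetailedView.as_view(), name='beerlist-detail'),
]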
class Target: <NEW_LINE> <INDENT> def __init__(self, cluster): <NEW_LINE> <INDENT> self._id = self.__class__._counter <NEW_LINE> self.__class__._counter += 1 <NEW_LINE> self.cluster = cluster <NEW_LINE> self.tracks = {} <NEW_LINE> self.reset() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def initial(cluster, filter): <NEW_LINE> <INDENT> self = Target(cluster) <NEW_LINE> self.tracks = {None: Track.initial(self, filter)} <NEW_LINE> self.reset() <NEW_LINE> return self <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def new(cluster, filter, report, sensor): <NEW_LINE> <INDENT> self = Target(cluster) <NEW_LINE> tr = Track.new(self, filter, sensor, report) <NEW_LINE> self.tracks = {report: tr} <NEW_LINE> self.new_tracks[report] = tr <NEW_LINE> return self <NEW_LINE> <DEDENT> def finalize_assignment(self, new_tracks): <NEW_LINE> <INDENT> for tr in self.tracks.values(): <NEW_LINE> <INDENT> tr.children = {r: c for r, c in tr.children.items() if c in new_tracks} <NEW_LINE> <DEDENT> self.tracks = {tr.report: tr for tr in new_tracks} <NEW_LINE> self.reset() <NEW_LINE> <DEDENT> def predict(self, dT): <NEW_LINE> <INDENT> for track in self.tracks.values(): <NEW_LINE> <INDENT> track.predict(dT) <NEW_LINE> <DEDENT> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self.new_tracks = {} <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "T({})".format(self._id)
Class to represent a single MHT target.
625990587cff6e4e811b6ff0
class FileGetter(threading.Thread): <NEW_LINE> <INDENT> def __init__(self, url, start): <NEW_LINE> <INDENT> self.url = url <NEW_LINE> self.result = None <NEW_LINE> self.starttime = start <NEW_LINE> threading.Thread.__init__(self) <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> self.result = [0] <NEW_LINE> try: <NEW_LINE> <INDENT> if (time.time() - self.starttime) <= 10: <NEW_LINE> <INDENT> f = urlopen(self.url) <NEW_LINE> while 1 and not shutdown_event.isSet(): <NEW_LINE> <INDENT> self.result.append(len(f.read(10240))) <NEW_LINE> if self.result[-1] == 0: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> f.close() <NEW_LINE> <DEDENT> <DEDENT> except IOError: <NEW_LINE> <INDENT> pass
Thread class for retrieving a URL
625990580a50d4780f706895
class TypedSchemaStrategy(SchemaStrategy): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def match_schema(cls, schema): <NEW_LINE> <INDENT> return schema.get('type') == cls.JS_TYPE <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def match_object(cls, obj): <NEW_LINE> <INDENT> return isinstance(obj, cls.PYTHON_TYPE) <NEW_LINE> <DEDENT> def to_schema(self): <NEW_LINE> <INDENT> schema = super().to_schema() <NEW_LINE> schema['type'] = self.JS_TYPE <NEW_LINE> return schema
base schema strategy class for scalar types. Subclasses define these two class constants: * `JS_TYPE`: a valid value of the `type` keyword * `PYTHON_TYPE`: Python type objects - can be a tuple of types
62599058a79ad1619776b594
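The docstring above says subclasses supply JS_TYPE and PYTHON_TYPE; the BooleanStrategy below is a made-up example of such a subclass (the rest of SchemaStrategy's behaviour is assumed from the surrounding project and only the class-level matchers are exercised).

class BooleanStrategy(TypedSchemaStrategy):
    # Illustrative subclass: maps Python bools to the JSON Schema "boolean" type.
    JS_TYPE = 'boolean'
    PYTHON_TYPE = bool

print(BooleanStrategy.match_object(True))                 # True
print(BooleanStrategy.match_schema({'type': 'boolean'}))  # True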
class event: <NEW_LINE> <INDENT> __metaclass__ = metavent <NEW_LINE> callbacks = defaultdict(list) <NEW_LINE> counter = defaultdict(int) <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> raise TypeError("You can't construct event.") <NEW_LINE> <DEDENT> def register(self, callback, to=[]): <NEW_LINE> <INDENT> for item in to: <NEW_LINE> <INDENT> event.callbacks[item].append(callback) <NEW_LINE> <DEDENT> <DEDENT> def count(self, event): <NEW_LINE> <INDENT> return self.counter.get(event, 0) <NEW_LINE> <DEDENT> def create(self, path, ctime=None): <NEW_LINE> <INDENT> if ctime: <NEW_LINE> <INDENT> log.info("create [%.2fs] %s", ctime, path) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> log.info("create %s", path) <NEW_LINE> <DEDENT> <DEDENT> def update(self, path, ctime=None): <NEW_LINE> <INDENT> if ctime: <NEW_LINE> <INDENT> log.info("update [%.2fs] %s", ctime, path) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> log.info("update %s", path) <NEW_LINE> <DEDENT> <DEDENT> def skip(self, path): <NEW_LINE> <INDENT> log.skip("skip %s", path) <NEW_LINE> <DEDENT> def identical(self, path): <NEW_LINE> <INDENT> log.skip("identical %s", path) <NEW_LINE> <DEDENT> def remove(self, path): <NEW_LINE> <INDENT> log.info("remove %s", path)
This helper class provides an easy mechanism to give user feedback about created, changed or deleted files. As a side effect, every non-destructive call adds the given path to the global tracking list and makes it possible to remove unused files (e.g. after you've changed your permalink scheme or just reworded your title). .. Note:: This class is a singleton and should not be initialized .. method:: count(event) :param event: count calls of this particular event :type event: string
625990581f037a2d8b9e5342
class HtmlHyperlink(object): <NEW_LINE> <INDENT> swagger_types = { 'anchortext': 'str', 'url': 'str' } <NEW_LINE> attribute_map = { 'anchortext': 'Anchortext', 'url': 'Url' } <NEW_LINE> def __init__(self, anchortext=None, url=None): <NEW_LINE> <INDENT> self._anchortext = None <NEW_LINE> self._url = None <NEW_LINE> self.discriminator = None <NEW_LINE> if anchortext is not None: <NEW_LINE> <INDENT> self.anchortext = anchortext <NEW_LINE> <DEDENT> if url is not None: <NEW_LINE> <INDENT> self.url = url <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def anchortext(self): <NEW_LINE> <INDENT> return self._anchortext <NEW_LINE> <DEDENT> @anchortext.setter <NEW_LINE> def anchortext(self, anchortext): <NEW_LINE> <INDENT> self._anchortext = anchortext <NEW_LINE> <DEDENT> @property <NEW_LINE> def url(self): <NEW_LINE> <INDENT> return self._url <NEW_LINE> <DEDENT> @url.setter <NEW_LINE> def url(self, url): <NEW_LINE> <INDENT> self._url = url <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> if issubclass(HtmlHyperlink, dict): <NEW_LINE> <INDENT> for key, value in self.items(): <NEW_LINE> <INDENT> result[key] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, HtmlHyperlink): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
6259905899cbb53fe683248d
class StickerUpdateView(PermissionRequiredMixin, UpdateView): <NEW_LINE> <INDENT> form_class = StickerForm <NEW_LINE> model = Sticker <NEW_LINE> template_name = 'sticker/sticker_update.html' <NEW_LINE> success_url = reverse_lazy('sticker_list') <NEW_LINE> permission_required = 'sticker.can_make_sticker'
Update a sticker when needed.
6259905823e79379d538daa9
class Transformer(six.with_metaclass(ABCMeta, TransformerMixin)): <NEW_LINE> <INDENT> @abstractmethod <NEW_LINE> def describe(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def transform(self, X): <NEW_LINE> <INDENT> if isinstance(X, np.ndarray): <NEW_LINE> <INDENT> if X.ndim == 2: <NEW_LINE> <INDENT> mapped = self._transform_array(X) <NEW_LINE> return mapped <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError('Input has the wrong shape: %s with %i' ' dimensions. Expecting a matrix (2 dimensions)' % (str(X.shape), X.ndim)) <NEW_LINE> <DEDENT> <DEDENT> elif isinstance(X, (list, tuple)): <NEW_LINE> <INDENT> out = [] <NEW_LINE> for x in X: <NEW_LINE> <INDENT> mapped = self._transform_array(x) <NEW_LINE> out.append(mapped) <NEW_LINE> <DEDENT> return out <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError('Input has the wrong type: %s ' '. Either accepting numpy arrays of dimension 2 ' 'or lists of such arrays' % (str(type(X)))) <NEW_LINE> <DEDENT> <DEDENT> @abstractmethod <NEW_LINE> def _transform_array(self, X): <NEW_LINE> <INDENT> raise NotImplementedError()
A transformer takes data and transforms it
62599058627d3e7fe0e0843b
class TagPlaceObject(DisplayListTag): <NEW_LINE> <INDENT> TYPE = 4 <NEW_LINE> hasClipActions = False <NEW_LINE> hasClipDepth = False <NEW_LINE> hasName = False <NEW_LINE> hasRatio = False <NEW_LINE> hasColorTransform = False <NEW_LINE> hasMatrix = False <NEW_LINE> hasCharacter = False <NEW_LINE> hasMove = False <NEW_LINE> hasImage = False <NEW_LINE> hasClassName = False <NEW_LINE> hasCacheAsBitmap = False <NEW_LINE> hasBlendMode = False <NEW_LINE> hasFilterList = False <NEW_LINE> depth = 0 <NEW_LINE> matrix = None <NEW_LINE> colorTransform = None <NEW_LINE> ratio = 0 <NEW_LINE> instanceName = None <NEW_LINE> clipDepth = 0 <NEW_LINE> clipActions = None <NEW_LINE> className = None <NEW_LINE> blendMode = 0 <NEW_LINE> bitmapCache = 0 <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self._surfaceFilterList = [] <NEW_LINE> super(TagPlaceObject, self).__init__() <NEW_LINE> <DEDENT> def parse(self, data, length, version=1): <NEW_LINE> <INDENT> pos = data.tell() <NEW_LINE> self.characterId = data.readUI16() <NEW_LINE> self.depth = data.readUI16(); <NEW_LINE> self.matrix = data.readMATRIX(); <NEW_LINE> self.hasCharacter = True; <NEW_LINE> self.hasMatrix = True; <NEW_LINE> if data.tell() - pos < length: <NEW_LINE> <INDENT> colorTransform = data.readCXFORM() <NEW_LINE> self.hasColorTransform = True <NEW_LINE> <DEDENT> <DEDENT> def get_dependencies(self): <NEW_LINE> <INDENT> s = super(TagPlaceObject, self).get_dependencies() <NEW_LINE> if self.hasCharacter: <NEW_LINE> <INDENT> s.add(self.characterId) <NEW_LINE> <DEDENT> return s <NEW_LINE> <DEDENT> @property <NEW_LINE> def filters(self): <NEW_LINE> <INDENT> return self._surfaceFilterList <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return "PlaceObject" <NEW_LINE> <DEDENT> @property <NEW_LINE> def type(self): <NEW_LINE> <INDENT> return TagPlaceObject.TYPE <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> s = super(TagPlaceObject, self).__str__() + " " + "Depth: %d, " % self.depth + "CharacterID: %d" % self.characterId <NEW_LINE> if self.hasName: <NEW_LINE> <INDENT> s+= ", InstanceName: %s" % self.instanceName <NEW_LINE> <DEDENT> if self.hasMatrix: <NEW_LINE> <INDENT> s += ", Matrix: %s" % self.matrix.__str__() <NEW_LINE> <DEDENT> if self.hasClipDepth: <NEW_LINE> <INDENT> s += ", ClipDepth: %d" % self.clipDepth <NEW_LINE> <DEDENT> if self.hasColorTransform: <NEW_LINE> <INDENT> s += ", ColorTransform: %s" % self.colorTransform.__str__() <NEW_LINE> <DEDENT> if self.hasFilterList: <NEW_LINE> <INDENT> s += ", Filters: %d" % len(self.filters) <NEW_LINE> <DEDENT> if self.hasBlendMode: <NEW_LINE> <INDENT> s += ", Blendmode: %d" % self.blendMode <NEW_LINE> <DEDENT> return s
The PlaceObject tag adds a character to the display list. The CharacterId identifies the character to be added. The Depth field specifies the stacking order of the character. The Matrix field specifies the position, scale, and rotation of the character. If the size of the PlaceObject tag exceeds the end of the transformation matrix, it is assumed that a ColorTransform field is appended to the record. The ColorTransform field specifies a color effect (such as transparency) that is applied to the character. The same character can be added more than once to the display list with a different depth and transformation matrix.
62599058379a373c97d9a5d2
class RainforestTree(ProceduralTree): <NEW_LINE> <INDENT> branchslope = 1 <NEW_LINE> foliage_shape = [3.4, 2.6] <NEW_LINE> def prepare(self, world): <NEW_LINE> <INDENT> self.species = 3 <NEW_LINE> self.height = randint(10, 20) <NEW_LINE> self.trunkradius = randint(5, 15) <NEW_LINE> ProceduralTree.prepare(self, world) <NEW_LINE> self.trunkradius /= PHI + 1 <NEW_LINE> self.trunkheight *= .9 <NEW_LINE> <DEDENT> def shapefunc(self, y): <NEW_LINE> <INDENT> if y < self.height * 0.8: <NEW_LINE> <INDENT> twigs = ProceduralTree.shapefunc(self,y) <NEW_LINE> if twigs is not None and random() < 0.07: <NEW_LINE> <INDENT> return twigs <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> width = self.height * 1 / (IPHI + 1) <NEW_LINE> topdist = (self.height - y) / (self.height * 0.2) <NEW_LINE> dist = width * (PHI + topdist) * (PHI + random()) * 1 / (IPHI + 1) <NEW_LINE> return dist
A big rainforest tree.
625990583c8af77a43b68a17
class ParserTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.parser = generate.create_parser() <NEW_LINE> <DEDENT> def test_output(self): <NEW_LINE> <INDENT> namespace = self.parser.parse_args(("--output", "a.txt")) <NEW_LINE> self.assertEqual(namespace.output, "a.txt") <NEW_LINE> <DEDENT> def test_model(self): <NEW_LINE> <INDENT> namespace = self.parser.parse_args(("--mode", "model.txt")) <NEW_LINE> self.assertEqual(namespace.model, "model.txt") <NEW_LINE> <DEDENT> def test_length(self): <NEW_LINE> <INDENT> namespace = self.parser.parse_args(("--length", "10")) <NEW_LINE> self.assertEqual(namespace.length, 10)
New tests were never actually devised...
6259905807f4c71912bb09e8
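These tests assume a generate.create_parser() helper that is not part of this record; a minimal sketch of what such a parser could look like, with only the three option names and types taken from the assertions above and everything else (description, help text) assumed:

import argparse

def create_parser():
    # Hypothetical parser matching the tests: --output, --model and --length.
    parser = argparse.ArgumentParser(description="Generate text from a model.")
    parser.add_argument("--output", help="file to write the generated text to")
    parser.add_argument("--model", help="path to the trained model file")
    parser.add_argument("--length", type=int, help="number of tokens to generate")
    return parser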
class DetectedBreak(proto.Message): <NEW_LINE> <INDENT> class BreakType(proto.Enum): <NEW_LINE> <INDENT> UNKNOWN = 0 <NEW_LINE> SPACE = 1 <NEW_LINE> SURE_SPACE = 2 <NEW_LINE> EOL_SURE_SPACE = 3 <NEW_LINE> HYPHEN = 4 <NEW_LINE> LINE_BREAK = 5 <NEW_LINE> <DEDENT> type_ = proto.Field( proto.ENUM, number=1, enum="TextAnnotation.DetectedBreak.BreakType", ) <NEW_LINE> is_prefix = proto.Field(proto.BOOL, number=2,)
Detected start or end of a structural component. Attributes: type_ (google.cloud.vision_v1p2beta1.types.TextAnnotation.DetectedBreak.BreakType): Detected break type. is_prefix (bool): True if break prepends the element.
6259905821a7993f00c6751b
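A small construction sketch for the message above, assuming the usual proto-plus import path from google.cloud.vision_v1p2beta1; the field names type_ and is_prefix come from the definition itself:

from google.cloud.vision_v1p2beta1 import types

# Build a DetectedBreak marking a sure end-of-line space.
br = types.TextAnnotation.DetectedBreak(
    type_=types.TextAnnotation.DetectedBreak.BreakType.EOL_SURE_SPACE,
    is_prefix=False,
)
print(br.type_, br.is_prefix)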
class UnsupportedPlatform(Exception): <NEW_LINE> <INDENT> def __init__(self, distro, codename, release): <NEW_LINE> <INDENT> self.distro = distro <NEW_LINE> self.codename = codename <NEW_LINE> self.release = release <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '{doc}: {distro} {codename} {release}'.format( doc=self.__doc__.strip(), distro=self.distro, codename=self.codename, release=self.release, )
Platform is not supported.
62599058cc0a2c111447c59a
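A short usage sketch for the exception above; the platform tuples are illustrative values only:

def check_platform(distro, codename, release):
    # Hypothetical whitelist; anything else is rejected.
    supported = {("ubuntu", "focal", "20.04"), ("debian", "bullseye", "11")}
    if (distro, codename, release) not in supported:
        raise UnsupportedPlatform(distro, codename, release)

try:
    check_platform("ubuntu", "warty", "4.10")
except UnsupportedPlatform as exc:
    print(exc)  # Platform is not supported.: ubuntu warty 4.10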
class Read_DAQ(Driver): <NEW_LINE> <INDENT> def __init__(self, device_name): <NEW_LINE> <INDENT> self._daq = Device(device_name) <NEW_LINE> self._tasks = dict() <NEW_LINE> <DEDENT> @Action() <NEW_LINE> def new_task(self, task_name, channels): <NEW_LINE> <INDENT> if task_name in self._tasks: <NEW_LINE> <INDENT> self.clear_task(task_name) <NEW_LINE> <DEDENT> ch0 = channels[0] <NEW_LINE> if ch0 in self._daq.counter_input_channels: <NEW_LINE> <INDENT> task = CounterInputTask(task_name) <NEW_LINE> task_type = 'counter' <NEW_LINE> valid_channels = self._daq.counter_input_channels <NEW_LINE> <DEDENT> elif ch0 in self._daq.analog_input_channels: <NEW_LINE> <INDENT> task = AnalogInputTask(task_name) <NEW_LINE> task_type = 'analog' <NEW_LINE> valid_channels = self._daq.analog_input_channels <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise Exception( 'Cannot identify the type of channel for {}. Channel must be either in {}, or in {}'.format(ch0, self._daq.counter_input_channels, self._daq.analog_input_channels)) <NEW_LINE> <DEDENT> for ch in channels: <NEW_LINE> <INDENT> if ch in valid_channels: <NEW_LINE> <INDENT> ch_obj = CountEdgesChannel(ch) if task_type == 'counter' else VoltageInputChannel(ch) <NEW_LINE> task.add_channel(ch_obj) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> task.clear() <NEW_LINE> raise Exception('Invalid channel {} for task of type {}'.format(ch, task_type)) <NEW_LINE> <DEDENT> <DEDENT> self._tasks[task_name] = task <NEW_LINE> <DEDENT> @Action() <NEW_LINE> def clear_task(self, task_name): <NEW_LINE> <INDENT> task = self._tasks.pop(task_name) <NEW_LINE> task.clear() <NEW_LINE> <DEDENT> @Action() <NEW_LINE> def clear_all_task(self): <NEW_LINE> <INDENT> for task_name in list(self._tasks): <NEW_LINE> <INDENT> self.clear_task(task_name) <NEW_LINE> <DEDENT> <DEDENT> @Action() <NEW_LINE> def start(self, task_name): <NEW_LINE> <INDENT> self._tasks[task_name].start() <NEW_LINE> <DEDENT> @Action() <NEW_LINE> def stop(self, task_name): <NEW_LINE> <INDENT> self._tasks[task_name].stop() <NEW_LINE> <DEDENT> @Action() <NEW_LINE> def read(self, task_name, samples_per_channel=None, timeout=Q_(10.0, 's'), group_by='channel'): <NEW_LINE> <INDENT> task = self._tasks[task_name] <NEW_LINE> if task.IO_TYPE == 'AI': <NEW_LINE> <INDENT> return task.read(samples_per_channel=samples_per_channel, timeout=timeout, group_by=group_by) <NEW_LINE> <DEDENT> elif task.IO_TYPE == 'CI': <NEW_LINE> <INDENT> return task.read(samples_per_channel=samples_per_channel, timeout=timeout) <NEW_LINE> <DEDENT> <DEDENT> @Action() <NEW_LINE> def configure_timing_sample_clock(self, task_name, **kwargs): <NEW_LINE> <INDENT> return self._tasks[task_name].configure_timing_sample_clock(**kwargs) <NEW_LINE> <DEDENT> @Action() <NEW_LINE> def get_task_type(self, task_name): <NEW_LINE> <INDENT> return self._tasks[task_name].IO_TYPE
A simplified version of a DAQ driver which can be used where only standard reads from the DAQ are necessary.
6259905873bcbd0ca4bcb841
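A usage sketch against the driver above; the device name 'Dev1' and counter channel 'Dev1/ctr0' are placeholders for whatever the hardware actually exposes, and the Device/Task classes are assumed to come from the same instrumentation library:

daq = Read_DAQ('Dev1')                        # placeholder device name
daq.new_task('photon_counts', ['Dev1/ctr0'])  # counter channel -> CounterInputTask
daq.start('photon_counts')
counts = daq.read('photon_counts', samples_per_channel=100)
daq.stop('photon_counts')
daq.clear_task('photon_counts')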
class SignInView(TemplateView): <NEW_LINE> <INDENT> template_name = 'signin.html' <NEW_LINE> def post(self, request, *args, **kwargs): <NEW_LINE> <INDENT> username = request.POST.get('username') <NEW_LINE> password = request.POST.get('password') <NEW_LINE> user = authenticate(username=username, password=password) <NEW_LINE> if user is not None: <NEW_LINE> <INDENT> if user.is_active: <NEW_LINE> <INDENT> login(request, user) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> messages.error(request, ugettext('UserIsInactive')) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> messages.error(request, ugettext('Invalidlogin')) <NEW_LINE> <DEDENT> return HttpResponseRedirect(reverse_lazy('website:index'))
TemplateView for the user's login
6259905807f4c71912bb09e9
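A minimal URLconf sketch wiring the view above into a Django project; the module layout and route are assumptions, and the 'website:index' URL the view redirects to is assumed to be defined elsewhere:

# urls.py of a hypothetical 'website' app
from django.urls import path
from .views import SignInView

app_name = 'website'
urlpatterns = [
    path('signin/', SignInView.as_view(), name='signin'),
]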
class gmsk_py_cc(gr.sync_block): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> gr.sync_block.__init__(self, name="gmsk_py_cc", in_sig=[numpy.float32], out_sig=[numpy.complex64]) <NEW_LINE> <DEDENT> def gaussian(self, in_signal): <NEW_LINE> <INDENT> filtered = numpy.empty_like(in_signal) <NEW_LINE> filters.gaussian_filter(in_signal, 20, order=0, output=filtered) <NEW_LINE> return filtered <NEW_LINE> <DEDENT> def work(self, input_items, output_items): <NEW_LINE> <INDENT> Fc = 1000 <NEW_LINE> Fbit = 50 <NEW_LINE> Fdev = 5000 <NEW_LINE> N = 100 <NEW_LINE> A = 1 <NEW_LINE> Fs = 10000 <NEW_LINE> A_n = 0.01 <NEW_LINE> N_prntbits = 10 <NEW_LINE> in0 = input_items[0] <NEW_LINE> out = output_items[0] <NEW_LINE> filters.gaussian_filter(in0, 6, order=0, output=in0) <NEW_LINE> c_t = [0 for i in range(len(in0))] <NEW_LINE> for i in range(len(in0)): <NEW_LINE> <INDENT> c_t[i] = c_t[i-1] + in0[i] <NEW_LINE> <DEDENT> c_t = numpy.array(c_t) <NEW_LINE> c_t = 10 * c_t / Fs <NEW_LINE> out[:] = numpy.cos(c_t) + 1j * numpy.sin(c_t) <NEW_LINE> return len(output_items[0])
GMSK modulator block: Gaussian-filters the incoming float stream, integrates it into a phase, and outputs the corresponding complex baseband samples (cos(phase) + j*sin(phase)).
625990582ae34c7f260ac695
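A throwaway flowgraph sketch for exercising the block above, assuming a standard GNU Radio 3.x Python environment and that the block class is importable; the test vector is arbitrary:

from gnuradio import gr, blocks

tb = gr.top_block()
src = blocks.vector_source_f([0.0, 1.0] * 500)  # arbitrary bit-like test data
mod = gmsk_py_cc()
snk = blocks.vector_sink_c()
tb.connect(src, mod, snk)
tb.run()
print(len(snk.data()))  # number of complex samples produced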
class ElementListResource(ListFilterResource): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(ElementListResource, self).__init__(*args, **kwargs) <NEW_LINE> self.filters = [('name', str, ['exact']), ('symbol', str, ['exact']), ('group', str, ['exact', 'contains', 'icontains']), ('molecular_weight', float, ['exact', 'gt', 'gte', 'lt', 'lte']), ('atomic_number', float, ['exact', 'gt', 'gte', 'lt', 'lte']), ] <NEW_LINE> self.model = Element <NEW_LINE> self.initialize_parser() <NEW_LINE> <DEDENT> @marshal_with(element_fields, envelope='data') <NEW_LINE> def get(self): <NEW_LINE> <INDENT> self.query = Element.query <NEW_LINE> self.parse_args_to_query(*self.get_args()) <NEW_LINE> return self.query.all()
Element List Resource
625990583539df3088ecd84a
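A registration sketch, assuming a Flask-RESTful Api and that ListFilterResource, Element and the element_fields marshalling dict are provided by the surrounding application:

from flask import Flask
from flask_restful import Api

app = Flask(__name__)
api = Api(app)
api.add_resource(ElementListResource, '/elements')  # GET /elements with the filters declared above

if __name__ == '__main__':
    app.run(debug=True)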
class ImplicitAllowedMethods(Resource): <NEW_LINE> <INDENT> def render_GET(self, request): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def render_PUT(self, request): <NEW_LINE> <INDENT> pass
A L{Resource} which implicitly defines its allowed methods by defining renderers to handle them.
62599059b57a9660fecd3029
class Role(Base): <NEW_LINE> <INDENT> __tablename__ = "role" <NEW_LINE> id = Column(Integer, primary_key=True) <NEW_LINE> name = Column(String) <NEW_LINE> persons = relationship("Person", secondary=person_and_role, back_populates="roles") <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return "Role(id=%r, name=%r, n_person=%r)" % ( self.id, self.name, len(self.persons))
A person's role, e.g. director, screenwriter, actor. - A Role may be associated with many Person records.
6259905891f36d47f2231967
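The Role model above refers to a person_and_role association table and a Person model that are not part of this record; a hypothetical sketch of both, with column names assumed and Base being the same declarative base:

from sqlalchemy import Table, Column, Integer, String, ForeignKey
from sqlalchemy.orm import relationship

# Association table linking people to their roles (many-to-many).
person_and_role = Table(
    "person_and_role", Base.metadata,
    Column("person_id", Integer, ForeignKey("person.id"), primary_key=True),
    Column("role_id", Integer, ForeignKey("role.id"), primary_key=True),
)

class Person(Base):
    __tablename__ = "person"
    id = Column(Integer, primary_key=True)
    name = Column(String)
    roles = relationship("Role", secondary=person_and_role, back_populates="persons")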
class KeywordSwitchCompleter(ArgSwitchCompleter): <NEW_LINE> <INDENT> baseclass() <NEW_LINE> def complete(self, token, parsed, parser, display=False, **kwargs): <NEW_LINE> <INDENT> return [('[%s=]' if display and not action.was_required else '%s=') % (option[1:]) for action_group in parser._action_groups for action in action_group._group_actions for option in action.option_strings if option.startswith('=' + token) and not self.option_consumed(action, parsed)]
Completes key=value argument switches based on the argparse grammar exposed for a command. TODO: probably more can be shared with ArgSwitchCompleter.
625990596e29344779b01bfa
class cached_property(object): <NEW_LINE> <INDENT> def __init__(self, func): <NEW_LINE> <INDENT> self.__doc__ = getattr(func, '__doc__') <NEW_LINE> self.func = func <NEW_LINE> <DEDENT> def __get__(self, obj, cls): <NEW_LINE> <INDENT> if obj is None: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> value = obj.__dict__[self.func.__name__] = self.func(obj) <NEW_LINE> return value
A property that is only computed once per instance and then replaces itself with an ordinary attribute. Deleting the attribute resets the property.
625990593cc13d1c6d466cee
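A small usage sketch for the descriptor above; the "expensive" computation is just a stand-in:

class Dataset(object):
    def __init__(self, values):
        self.values = values

    @cached_property
    def total(self):
        print("computing...")  # runs only on the first access per instance
        return sum(self.values)

d = Dataset([1, 2, 3])
print(d.total)   # prints "computing..." then 6
print(d.total)   # prints 6 only; the value was cached on the instance
del d.total      # deleting the cached attribute resets the property
print(d.total)   # prints "computing..." then 6 again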