code: string (lengths 4 to 4.48k)
docstring: string (lengths 1 to 6.45k)
_id: string (length 24)
class Cleverbot: <NEW_LINE> <INDENT> def __init__(self, api_key: str, session: aiohttp.ClientSession = None, context: DictContext = None): <NEW_LINE> <INDENT> self.context = context or None <NEW_LINE> self.session = session or None <NEW_LINE> self.api_key = api_key <NEW_LINE> self.api_url = "https://public-api.travitia.xyz/talk" <NEW_LINE> if session and not isinstance(session, aiohttp.ClientSession): <NEW_LINE> <INDENT> raise TypeError("session must be an aiohttp.ClientSession.") <NEW_LINE> <DEDENT> if context: <NEW_LINE> <INDENT> self.set_context(context) <NEW_LINE> <DEDENT> <DEDENT> def set_context(self, context: DictContext): <NEW_LINE> <INDENT> if not isinstance(context, DictContext): <NEW_LINE> <INDENT> raise TypeError("Context passed was not an instance of DictContext.") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.context = context <NEW_LINE> <DEDENT> <DEDENT> async def ask(self, query: str, id_=None, *, emotion: Emotion = Emotion.neutral): <NEW_LINE> <INDENT> if not self.session: <NEW_LINE> <INDENT> self.session = aiohttp.ClientSession() <NEW_LINE> <DEDENT> if not isinstance(emotion, Emotion): <NEW_LINE> <INDENT> raise ValueError("emotion must be an enum of async_cleverbot.Emotion.") <NEW_LINE> <DEDENT> if isinstance(self.context, DictContext): <NEW_LINE> <INDENT> ctx = self.context.update_context(id_, query) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ctx = dict(text=query) <NEW_LINE> <DEDENT> ctx["emotion"] = emotion.value <NEW_LINE> headers = dict(authorization=self.api_key) <NEW_LINE> async with self.session.post(self.api_url, data=ctx, headers=headers) as req: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> resp = await req.json() <NEW_LINE> <DEDENT> except aiohttp.ContentTypeError: <NEW_LINE> <INDENT> raise APIDown("The API is currently not working. Please wait while the devs fix it.") <NEW_LINE> <DEDENT> if resp.get("error") == "Invalid authorization credentials": <NEW_LINE> <INDENT> raise InvalidKey("The API key you provided was invalid.") <NEW_LINE> <DEDENT> if resp.get("response") == "The server returned a malformed response or it is down.": <NEW_LINE> <INDENT> raise APIDown("The API is currently not working. Please wait while the devs fix it.") <NEW_LINE> <DEDENT> <DEDENT> return Response.from_raw(resp) <NEW_LINE> <DEDENT> async def close(self): <NEW_LINE> <INDENT> if self.session: <NEW_LINE> <INDENT> await self.session.close()
The client to use for API interactions.
62599044711fe17d825e1622
class Property: <NEW_LINE> <INDENT> def __init__(self, square_feet='', beds='',baths='',**kwargs): <NEW_LINE> <INDENT> super().__init__(**kwargs) <NEW_LINE> self.square_feet = square_feet <NEW_LINE> self.num_bedrooms = beds <NEW_LINE> self.num_baths = baths <NEW_LINE> <DEDENT> def display(self): <NEW_LINE> <INDENT> print("PROPERTY DETAILS") <NEW_LINE> print("================") <NEW_LINE> print("square footage: {}".format(self.square_feet)) <NEW_LINE> print("bedrooms: {}".format(self.num_bedrooms)) <NEW_LINE> print("bathrooms: {}".format(self.num_baths)) <NEW_LINE> print() <NEW_LINE> <DEDENT> def prompt_init(): <NEW_LINE> <INDENT> return dict(square_feet=input("Enter the square feet: "),beds = input("Enter number of bedrooms: "), baths = input("Enter number of baths: ")) <NEW_LINE> <DEDENT> prompt_init = staticmethod(prompt_init)
Class which shows a property.
62599044d99f1b3c44d069a9
class MaterialFile(models.Model): <NEW_LINE> <INDENT> def _get_file_path(self, filename): <NEW_LINE> <INDENT> request = get_request() <NEW_LINE> if not request: <NEW_LINE> <INDENT> user = User.objects.get(pk=1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> user = request.user <NEW_LINE> <DEDENT> path = os.path.join(MATERIAL_FILE_PATH, user.username, filename) <NEW_LINE> path = default_storage.get_available_name(path) <NEW_LINE> return os.path.join(path, filename) <NEW_LINE> <DEDENT> file = models.FileField(_('File'), upload_to=_get_file_path) <NEW_LINE> author = models.ForeignKey(User, verbose_name=_('author'), editable=False, related_name="materialfiles") <NEW_LINE> class Meta: <NEW_LINE> <INDENT> app_label = 'materials' <NEW_LINE> ordering = ('-material__pk',) <NEW_LINE> verbose_name = _('MaterialFile') <NEW_LINE> verbose_name_plural = _('MaterialFiles') <NEW_LINE> <DEDENT> def save(self, *args, **kwargs): <NEW_LINE> <INDENT> request = get_request() <NEW_LINE> if request and request.user.is_authenticated(): <NEW_LINE> <INDENT> self.author = request.user <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.author = User.objects.get(pk=1) <NEW_LINE> <DEDENT> super(MaterialFile, self).save(*args, **kwargs) <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return self.file.name <NEW_LINE> <DEDENT> @property <NEW_LINE> def extension(self): <NEW_LINE> <INDENT> return os.path.splitext(self.file.name)[1][1:]
A model for a raw material file.
62599044287bf620b6272ef1
class UploadImage(BaseModel): <NEW_LINE> <INDENT> def __init__(self, user_id, new_url): <NEW_LINE> <INDENT> super().__init__('User', 'users') <NEW_LINE> self.user_id = user_id <NEW_LINE> self.new_url = new_url <NEW_LINE> <DEDENT> def updateimage(self): <NEW_LINE> <INDENT> return super().edit('passport_url', self.new_url, self.user_id)
contains methods for uploading an image
6259904476d4e153a661dbfa
class Schedule(models.Model): <NEW_LINE> <INDENT> user = models.OneToOneField(User, primary_key=True) <NEW_LINE> start = models.DateField() <NEW_LINE> end = models.DateField() <NEW_LINE> next_run = models.DateTimeField() <NEW_LINE> repeat = models.CharField(max_length=20, choices=REPEAT_CHOICES) <NEW_LINE> periods = models.CharField(max_length=20, choices=PERIOD_CHOICES)
Model to store a user's email delivery schedule.
Field descriptions:
next_run -- the next time scheduled events should be executed
repeat -- when to repeat sending (start + repeat = next_run)
periods -- what periods should be included in the report
6259904421a7993f00c67273
class SMArtConnectDataStore(SessionDataStore): <NEW_LINE> <INDENT> def _get_chrome_app(self, consumer_key): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return models.MachineApp.objects.get(consumer_key=consumer_key, app_type='chrome') <NEW_LINE> <DEDENT> except models.MachineApp.DoesNotExist: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def _get_token(self, token_str, app=None): <NEW_LINE> <INDENT> kwargs = {'token': token_str} <NEW_LINE> try: <NEW_LINE> <INDENT> ret = models.AccessToken.objects.get(**kwargs) <NEW_LINE> if not ret.smart_connect_p: <NEW_LINE> <INDENT> oauth.report_error( "Not a SMArt Connect Request -- don't treat as one!") <NEW_LINE> <DEDENT> return ret <NEW_LINE> <DEDENT> except models.AccessToken.DoesNotExist: <NEW_LINE> <INDENT> oauth.report_error( "No token means this isn't a SMArt Connect Request!")
Hybrid datastore that looks for
* a chrome app consumer
* a smart connect access token.
6259904491af0d3eaad3b12f
class ExponentialBackoff: <NEW_LINE> <INDENT> def __init__(self, base=1, *, integral=False): <NEW_LINE> <INDENT> self._base = base <NEW_LINE> self._exp = 0 <NEW_LINE> self._max = 10 <NEW_LINE> self._reset_time = base * 2**11 <NEW_LINE> self._last_invocation = time.monotonic() <NEW_LINE> rand = random.Random() <NEW_LINE> rand.seed() <NEW_LINE> self._randfunc = rand.randrange if integral else rand.uniform <NEW_LINE> <DEDENT> def delay(self): <NEW_LINE> <INDENT> invocation = time.monotonic() <NEW_LINE> interval = invocation - self._last_invocation <NEW_LINE> self._last_invocation = invocation <NEW_LINE> if interval > self._reset_time: <NEW_LINE> <INDENT> self._exp = 0 <NEW_LINE> <DEDENT> self._exp = min(self._exp + 1, self._max) <NEW_LINE> return self._randfunc(0, self._base * 2**self._exp)
An implementation of the exponential backoff algorithm

Provides a convenient interface to implement an exponential backoff for reconnecting or retrying transmissions in a distributed network. Once instantiated, the delay method will return the next interval to wait for when retrying a connection or transmission. The maximum delay increases exponentially with each retry up to a maximum of 2^10 * base, and is reset if no more attempts are needed in a period of 2^11 * base seconds.

Parameters
----------
base: :class:`int`
    The base delay in seconds. The first retry-delay will be up to this many seconds.
integral: :class:`bool`
    Set to True if whole periods of base is desirable, otherwise any number in between may be returned.
625990446e29344779b0195d
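A minimal standalone sketch (an editor illustration, not a dataset row) of the delay rule the ExponentialBackoff docstring above describes; the helper name next_delay and the hard-coded base=1, exponent cap of 10 and reset window of 2**11 * base seconds are assumptions taken from that description.

import random
import time

base = 1
exp = 0
last = time.monotonic()

def next_delay():
    # Reset the exponent after a long quiet period, otherwise grow it
    # (capped at 10) and return a random delay in [0, base * 2**exp].
    global exp, last
    now = time.monotonic()
    if now - last > base * 2 ** 11:
        exp = 0
    last = now
    exp = min(exp + 1, 10)
    return random.uniform(0, base * 2 ** exp)

print([round(next_delay(), 2) for _ in range(5)])  # successive delays bounded by 2, 4, 8, 16, 32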
class SanscriptTestCase(TestCase): <NEW_LINE> <INDENT> roman = {S.HK, S.IAST, S.SLP1} <NEW_LINE> brahmic = {x for x in S.SCHEMES} - roman <NEW_LINE> def compare_all(self, _from, _to): <NEW_LINE> <INDENT> for group in DATA[_from]: <NEW_LINE> <INDENT> if _to in DATA and group in DATA[_to]: <NEW_LINE> <INDENT> self.compare(_from, _to, group) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def compare(self, _from, _to, group): <NEW_LINE> <INDENT> source = DATA[_from][group] <NEW_LINE> actual = ' '.join(S.transliterate(source, _from, _to).split()) <NEW_LINE> expected = ' '.join(DATA[_to][group].split()) <NEW_LINE> self.assertEqual(expected, actual)
Ordinary :class:`~unittest.TestCase` with some helper data.
6259904450485f2cf55dc291
class ProxyWithoutSlashTestCase(JSONRPCTestCase): <NEW_LINE> <INDENT> def proxy(self): <NEW_LINE> <INDENT> return jsonrpc.Proxy("http://127.0.0.1:%d" % self.port)
Test with proxy that doesn't add a slash.
625990443c8af77a43b688c2
class DeleteReadingOnlySanction(MemberSanctionState): <NEW_LINE> <INDENT> def get_type(self): <NEW_LINE> <INDENT> return _(u"Autorisation d'écrire") <NEW_LINE> <DEDENT> def get_text(self): <NEW_LINE> <INDENT> return self.array_infos.get('unls-text', '') <NEW_LINE> <DEDENT> def get_detail(self): <NEW_LINE> <INDENT> return (_(u'Vous pouvez désormais poster sur les forums, dans les ' u'commentaires d\'articles et tutoriels.')) <NEW_LINE> <DEDENT> def apply_sanction(self, profile, ban): <NEW_LINE> <INDENT> profile.can_write = True <NEW_LINE> profile.end_ban_write = None <NEW_LINE> profile.save() <NEW_LINE> ban.save()
State of lifting the read-only sanction.
62599044cad5886f8bdc5a03
class RxPktPerPort(base_tests.SimpleDataPlane): <NEW_LINE> <INDENT> def runTest(self): <NEW_LINE> <INDENT> logging.info("Running RxPktPerPort test") <NEW_LINE> of_ports = config["port_map"].keys() <NEW_LINE> of_ports.sort() <NEW_LINE> self.assertTrue(len(of_ports) > 1, "Not enough ports for test") <NEW_LINE> delete_all_flows(self.controller) <NEW_LINE> logging.info("Insert a flow with match on ingress_port") <NEW_LINE> logging.info("Send N Packets on an ingress_port P ") <NEW_LINE> logging.info("Send Port_Stats Request for Port P , verify recieved packets counters are incrementing in accordance") <NEW_LINE> (pkt, match ) = wildcard_all_except_ingress(self,of_ports) <NEW_LINE> (counter) = get_portstats(self,of_ports[0]) <NEW_LINE> num_pkts = 5 <NEW_LINE> for pkt_cnt in range(num_pkts): <NEW_LINE> <INDENT> self.dataplane.send(of_ports[0],str(pkt)) <NEW_LINE> <DEDENT> pkts = num_pkts+counter[0] <NEW_LINE> verify_portstats(self,of_ports[0],rx_packets=pkts)
Verify that the rx_packets counter in the Port_Stats reply increments when packets are received on a port
62599044b57a9660fecd2d88
class LutAsShiftReg(Unit): <NEW_LINE> <INDENT> def _config(self) -> None: <NEW_LINE> <INDENT> self.DATA_WIDTH = Param(1) <NEW_LINE> self.ITEMS = Param(16) <NEW_LINE> self.INIT = Param(None) <NEW_LINE> <DEDENT> def _declr(self) -> None: <NEW_LINE> <INDENT> self.clk = Clk() <NEW_LINE> self.d_in = VldSynced() <NEW_LINE> self.d_in.DATA_WIDTH = self.DATA_WIDTH <NEW_LINE> self.d_out_addr = VectSignal(log2ceil(self.ITEMS)) <NEW_LINE> self.d_out = Signal(Bits(self.DATA_WIDTH))._m() <NEW_LINE> <DEDENT> def _impl(self) -> None: <NEW_LINE> <INDENT> out = [] <NEW_LINE> for i in range(self.DATA_WIDTH): <NEW_LINE> <INDENT> mem = self._sig(f"mem{i:d}", Bits(self.ITEMS), def_val=self.INIT) <NEW_LINE> If(self.clk._onRisingEdge(), If(self.d_in.vld, mem(Concat(mem[mem._dtype.bit_length() - 1:], self.d_in.data[i],)) ) ) <NEW_LINE> out.append(mem[self.d_out_addr]) <NEW_LINE> <DEDENT> self.d_out(Concat(*reversed(out)))
This component generates SRL16E and other shift registers. In order to allow Xilinx Vivado 2020.2 (and possibly any other version) to map this component into SRL16E and equivalents, several conditions must be satisfied:
1. the memory must not have a reset
2. the shift expressions must be performed on a single signal
3. the whole memory must be a single signal
4. the output must be read only by the index operator (a switch on the address does not work)
5. we cannot merge the memories of individual data bits
6259904466673b3332c31705
class HelloApiView(APIView): <NEW_LINE> <INDENT> serializer_class = serializers.HelloSerializer <NEW_LINE> def get(self, request, format=None): <NEW_LINE> <INDENT> an_apiview = [ 'Uses HTTP methods as function (get, post, patch, put, delete)', 'It is similar to traditional django view', 'Gives you the most control over your logic', 'Is mapped manually to URLs', ] <NEW_LINE> return Response({'message':'Hello World!', 'an_api':an_apiview}) <NEW_LINE> <DEDENT> def post(self, request): <NEW_LINE> <INDENT> serializer = serializers.HelloSerializer(data=request.data) <NEW_LINE> if serializer.is_valid(): <NEW_LINE> <INDENT> name = serializer.data.get("name") <NEW_LINE> message = 'Hello {0}'.format(name) <NEW_LINE> return Response({'message':message}) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return Response(serializer.errors ,status=status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> <DEDENT> def put(self, request, pk=None): <NEW_LINE> <INDENT> return Response({'method':'put'}) <NEW_LINE> <DEDENT> def patch(self, request, pk=None): <NEW_LINE> <INDENT> return Response({'method':'patch'}) <NEW_LINE> <DEDENT> def delete(self, request, pk=None): <NEW_LINE> <INDENT> return Response({'method':'delete'})
Test API View
625990440fa83653e46f61e6
class Record: <NEW_LINE> <INDENT> def __init__(self, filepath, creation_time, duration): <NEW_LINE> <INDENT> self.filepath = filepath <NEW_LINE> self.creation_time = creation_time <NEW_LINE> self.duration = duration <NEW_LINE> self.finish_time = creation_time + duration <NEW_LINE> <DEDENT> def overlap_with(self, record): <NEW_LINE> <INDENT> return ((self.creation_time < record.finish_time) and (record.creation_time < self.finish_time))
Util struct for working with record data.
62599044d53ae8145f91976a
class Npc(object): <NEW_LINE> <INDENT> def __init__(self, image_name, x, y, message_1, message_2, name): <NEW_LINE> <INDENT> super(Npc, self).__init__() <NEW_LINE> self.image_name = image_name <NEW_LINE> self.image = pygame.image.load(image_name).convert_alpha() <NEW_LINE> self.image = pygame.transform.scale(self.image, (50, 50)) <NEW_LINE> self.x = x <NEW_LINE> self.y = y <NEW_LINE> self.message_1 = translate(message_1) <NEW_LINE> self.message_2 = translate(message_2) <NEW_LINE> self.message_3 = translate("Quest not yet completed.") <NEW_LINE> self.name = translate(name) <NEW_LINE> <DEDENT> def complete_quest(self): <NEW_LINE> <INDENT> self.message_3 = translate("Quest completed!")
docstring for Npc
625990446fece00bbaccccbf
class OAuthCodeExchangeHandler(OAuthBaseRequestHandler): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> code = self.request.get('code') <NEW_LINE> if not code: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> oauth_flow = self.create_oauth_flow() <NEW_LINE> try: <NEW_LINE> <INDENT> creds = oauth_flow.step2_exchange(code) <NEW_LINE> <DEDENT> except FlowExchangeError: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> users_service = util.create_service('oauth2', 'v2', creds) <NEW_LINE> user = users_service.userinfo().get().execute() <NEW_LINE> userid = user.get('id') <NEW_LINE> StorageByKeyName(Credentials, userid, 'credentials').put(creds) <NEW_LINE> logging.info('Successfully stored credentials for user: %s', userid) <NEW_LINE> util.store_userid(self, userid) <NEW_LINE> self._perform_post_auth_tasks(userid, creds) <NEW_LINE> self.redirect('/') <NEW_LINE> <DEDENT> def _perform_post_auth_tasks(self, userid, creds): <NEW_LINE> <INDENT> mirror_service = util.create_service('mirror', 'v1', creds) <NEW_LINE> hostname = util.get_full_url(self, '') <NEW_LINE> if hostname.startswith('https://'): <NEW_LINE> <INDENT> subscription_body = { 'collection': 'timeline', 'userToken': userid, 'callbackUrl': util.get_full_url(self, '/notify') } <NEW_LINE> mirror_service.subscriptions().insert(body=subscription_body).execute() <NEW_LINE> contact_body = { 'id': 'python-quick-start', 'displayName': 'Python Quick Start', 'imageUrls': [util.get_full_url(self, '/static/images/python.png')], 'acceptCommands': [{ 'type': 'TAKE_A_NOTE' }] } <NEW_LINE> mirror_service.contacts().insert(body=contact_body).execute() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logging.info('Post auth tasks are not supported on staging.') <NEW_LINE> <DEDENT> timeline_item_body = { 'text': 'Welcome to the Python Quick Start', 'notification': { 'level': 'DEFAULT' } } <NEW_LINE> mirror_service.timeline().insert(body=timeline_item_body).execute()
Request handler for OAuth 2.0 code exchange.
62599044d99f1b3c44d069aa
class Tag( DictSchema ): <NEW_LINE> <INDENT> id = Int64Schema <NEW_LINE> name = StrSchema <NEW_LINE> def __new__( cls, *args: typing.Union[dict, frozendict, ], id: typing.Union[id, Unset] = unset, name: typing.Union[name, Unset] = unset, _configuration: typing.Optional[Configuration] = None, **kwargs: typing.Type[Schema], ) -> 'Tag': <NEW_LINE> <INDENT> return super().__new__( cls, *args, id=id, name=name, _configuration=_configuration, **kwargs, )
NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually.
6259904415baa7234946329e
class WinShiftLoseStay(MemoryOnePlayer): <NEW_LINE> <INDENT> name = 'Win-Shift Lose-Stay' <NEW_LINE> classifier = { 'memory_depth': 1, 'stochastic': False, 'makes_use_of': set(), 'long_run_time': False, 'inspects_source': False, 'manipulates_source': False, 'manipulates_state': False } <NEW_LINE> @init_args <NEW_LINE> def __init__(self, initial=D): <NEW_LINE> <INDENT> Player.__init__(self) <NEW_LINE> self.set_four_vector([0, 1, 1, 0]) <NEW_LINE> self._initial = initial
Win-Shift Lose-Stay, also called Reverse Pavlov. For reference see: "Engineering Design of Strategies for Winning Iterated Prisoner's Dilemma Competitions" by Jiawei Li, Philip Hingston, and Graham Kendall. IEEE TRANSACTIONS ON COMPUTATIONAL INTELLIGENCE AND AI IN GAMES, VOL. 3, NO. 4, DECEMBER 2011
6259904473bcbd0ca4bcb598
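A small illustrative sketch (an editor addition, not dataset content) of the memory-one rule encoded by the four-vector [0, 1, 1, 0] set above, assuming, as in the Axelrod library convention, that the four entries are the probabilities of cooperating after the previous-round outcomes (C,C), (C,D), (D,C), (D,D); the names four_vector and next_move are hypothetical.

# Deterministic here because all probabilities are 0 or 1.
four_vector = {("C", "C"): 0, ("C", "D"): 1, ("D", "C"): 1, ("D", "D"): 0}

def next_move(my_last, opp_last):
    # Cooperate with the probability given by the four-vector entry.
    return "C" if four_vector[(my_last, opp_last)] == 1 else "D"

print(next_move("C", "C"))  # "D": shift after mutual cooperation, the reverse of Pavlov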
class RunTable(OrgObjPermsMixin, SmartReadView): <NEW_LINE> <INDENT> paginate_by = 100 <NEW_LINE> def get_context_data(self, *args, **kwargs): <NEW_LINE> <INDENT> context = super(FlowCRUDL.RunTable, self).get_context_data(*args, **kwargs) <NEW_LINE> flow = self.get_object() <NEW_LINE> org = self.derive_org() <NEW_LINE> context['rulesets'] = list(flow.rule_sets.filter(ruleset_type__in=RuleSet.TYPE_WAIT).order_by('y')) <NEW_LINE> for ruleset in context['rulesets']: <NEW_LINE> <INDENT> rules = len(ruleset.get_rules()) <NEW_LINE> ruleset.category = 'true' if rules > 1 else 'false' <NEW_LINE> <DEDENT> test_contacts = Contact.objects.filter(org=org, is_test=True).values_list('id', flat=True) <NEW_LINE> runs = FlowRun.objects.filter(flow=flow, responded=True).exclude(contact__in=test_contacts) <NEW_LINE> modified_on = self.request.GET.get('modified_on', None) <NEW_LINE> if modified_on: <NEW_LINE> <INDENT> id = self.request.GET['id'] <NEW_LINE> modified_on = datetime.fromtimestamp(int(modified_on), flow.org.timezone) <NEW_LINE> runs = runs.filter(modified_on__lt=modified_on).exclude(modified_on=modified_on, id__lt=id) <NEW_LINE> <DEDENT> runs = list(runs.order_by('-modified_on')[:self.paginate_by]) <NEW_LINE> for run in runs: <NEW_LINE> <INDENT> values = {v.ruleset.uuid: v for v in Value.objects.filter(run=run, ruleset__in=context['rulesets']).select_related('ruleset')} <NEW_LINE> run.value_list = [] <NEW_LINE> for ruleset in context['rulesets']: <NEW_LINE> <INDENT> value = values.get(ruleset.uuid) <NEW_LINE> run.value_list.append(value) <NEW_LINE> <DEDENT> <DEDENT> context['runs'] = runs <NEW_LINE> return context
Intercooler helper which renders rows of runs to be embedded in an existing table with infinite scrolling
6259904415baa7234946329f
class RegSet(set): <NEW_LINE> <INDENT> def add(self, reg): <NEW_LINE> <INDENT> set.add(self, Register(reg.reg))
Discards swizzle, negate and absolute value modifiers to consider register uniqueness
62599044d7e4931a7ef3d383
class VolumeQuotasClientJSON(BaseVolumeQuotasClientJSON): <NEW_LINE> <INDENT> pass
Client class to send CRUD Volume Type API V1 requests to a Cinder endpoint
625990443eb6a72ae038b96e
class StudyDeviceSettingsCollection( DatabaseCollection ): <NEW_LINE> <INDENT> OBJTYPE = StudyDeviceSettings
The per-study device settings.
62599044e64d504609df9d57
class GroupsError(Exception): <NEW_LINE> <INDENT> pass
Generic exception wrapped around any response from the Groups server.
625990444e696a045264e7a7
class Solution: <NEW_LINE> <INDENT> def sortColors(self, nums): <NEW_LINE> <INDENT> start = i = 0 <NEW_LINE> end = len(nums) - 1 <NEW_LINE> while i <= end: <NEW_LINE> <INDENT> if nums[i] == 0: <NEW_LINE> <INDENT> nums[i], nums[start] = nums[start], nums[i] <NEW_LINE> i += 1 <NEW_LINE> start += 1 <NEW_LINE> <DEDENT> elif nums[i] == 1: <NEW_LINE> <INDENT> i += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> nums[i], nums[end] = nums[end], nums[i] <NEW_LINE> end -= 1
@param nums: a list of integers which are 0, 1 or 2
@return: nothing
625990441f5feb6acb163f01
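A standalone sketch (an editor illustration, not a dataset row) of the in-place three-way partition that sortColors above performs on a list of 0s, 1s and 2s (the Dutch national flag scheme); the function name sort_colors is hypothetical.

def sort_colors(nums):
    # Maintain three regions: [0, start) all 0s, [start, i) all 1s, (end, len) all 2s.
    start, i, end = 0, 0, len(nums) - 1
    while i <= end:
        if nums[i] == 0:
            nums[i], nums[start] = nums[start], nums[i]
            start += 1
            i += 1
        elif nums[i] == 1:
            i += 1
        else:
            nums[i], nums[end] = nums[end], nums[i]
            end -= 1

nums = [2, 0, 1, 2, 1, 0]
sort_colors(nums)
print(nums)  # [0, 0, 1, 1, 2, 2]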
class HistStack(object): <NEW_LINE> <INDENT> def __init__(self, hists=None, title=None, xlabel=None, ylabel=None): <NEW_LINE> <INDENT> self.hists = [] <NEW_LINE> self.kwargs = [] <NEW_LINE> self.title = title <NEW_LINE> self.xlabel = xlabel <NEW_LINE> self.ylabel = ylabel <NEW_LINE> if hists: <NEW_LINE> <INDENT> for hist in hists: <NEW_LINE> <INDENT> self.add(hist) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> return self.hists[index] <NEW_LINE> <DEDENT> def __setitem__(self, index, value): <NEW_LINE> <INDENT> self.hists[index] = value <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.hists) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self.hists) <NEW_LINE> <DEDENT> def max(self): <NEW_LINE> <INDENT> maxes = [max(x) for x in self.hists] <NEW_LINE> try: <NEW_LINE> <INDENT> return max(maxes) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> <DEDENT> def stackmax(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return max([sum([h[i] for h in self.hists]) for i in range(self.hists[0].nbins)]) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> print([h.nbins for h in self.hists]) <NEW_LINE> <DEDENT> <DEDENT> def scale(self, factor): <NEW_LINE> <INDENT> for hist in self.hists: <NEW_LINE> <INDENT> hist.scale(factor) <NEW_LINE> <DEDENT> <DEDENT> def min(self, threshold=None): <NEW_LINE> <INDENT> mins = [x.min(threshold) for x in self.hists] <NEW_LINE> return min(mins) <NEW_LINE> <DEDENT> def add(self, hist, **kwargs): <NEW_LINE> <INDENT> if "label" in kwargs: <NEW_LINE> <INDENT> hist.label = kwargs['label'] <NEW_LINE> del kwargs['label'] <NEW_LINE> <DEDENT> if len(self) > 0: <NEW_LINE> <INDENT> if hist.xedges != self.hists[0].xedges: <NEW_LINE> <INDENT> raise ValueError("Cannot add %s to stack; all Hists must " "have the same binning." % hist.name) <NEW_LINE> <DEDENT> <DEDENT> self.hists.append(hist) <NEW_LINE> self.kwargs.append(kwargs)
A container to hold Hist objects for plotting together. When plotting, the title and the x and y labels of the last Hist added will be used unless specified otherwise in the constructor.
62599044cad5886f8bdc5a04
class AbstractMelonOrder(object): <NEW_LINE> <INDENT> def __init__(self, species, qty, melon_type, country_code=None): <NEW_LINE> <INDENT> self.species = species <NEW_LINE> self.qty = qty <NEW_LINE> self.shipped = False <NEW_LINE> self.type = melon_type <NEW_LINE> self.country_code = country_code <NEW_LINE> <DEDENT> def get_total(self): <NEW_LINE> <INDENT> base_price = 5 <NEW_LINE> total = (1 + self.tax) * self.qty * base_price <NEW_LINE> return total <NEW_LINE> <DEDENT> def mark_shipped(self): <NEW_LINE> <INDENT> self.shipped = True
Super class for all melon types
62599044498bea3a75a58e2a
class TableBreadcrumb(ViewBreadcrumb): <NEW_LINE> <INDENT> grok.adapts( icemac.addressbook.browser.table.Table, icemac.addressbook.browser.interfaces.IAddressBookLayer)
View for views based on the `Table` class and its subclasses.
625990448e71fb1e983bcddc
class NodeAttribute(NameParsed): <NEW_LINE> <INDENT> _parser = AttributeNameParser <NEW_LINE> _accepts = ('MayaNodePath', 'AttrSep', 'AttributePath') <NEW_LINE> @property <NEW_LINE> def parts(self): <NEW_LINE> <INDENT> return self.sub <NEW_LINE> <DEDENT> @property <NEW_LINE> def separator(self): <NEW_LINE> <INDENT> return AttrSep() <NEW_LINE> <DEDENT> @property <NEW_LINE> def nodePath(self): <NEW_LINE> <INDENT> return self.parts[0] <NEW_LINE> <DEDENT> @property <NEW_LINE> def attribute(self): <NEW_LINE> <INDENT> attr = self.parts[2] <NEW_LINE> if not attr.isCompound(): <NEW_LINE> <INDENT> return attr.last <NEW_LINE> <DEDENT> return attr <NEW_LINE> <DEDENT> def shortName(self): <NEW_LINE> <INDENT> new = self.copy() <NEW_LINE> for i in range(len(new.nodePath.nodes) - 1): <NEW_LINE> <INDENT> new.nodePath.popNode(0) <NEW_LINE> <DEDENT> return new <NEW_LINE> <DEDENT> @property <NEW_LINE> def attributes(self): <NEW_LINE> <INDENT> attr = self.attribute <NEW_LINE> if isinstance(attr, Attribute): <NEW_LINE> <INDENT> return (attr,) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.attribute.attributes <NEW_LINE> <DEDENT> <DEDENT> def popNode(self): <NEW_LINE> <INDENT> self.nodePath.popNode()
The name of a Maya node and attribute (plug): a MayaNodePath followed by a AttrSep and a AttributePath

Rule : NodeAttribute = `MayaNodePath` `AttrSep` `AttributePath`

Composed Of: `MayaNodePath`, `AttrSep`, `AttributePath`

Component Of: `MayaObjectName`

>>> nodeAttr = NodeAttribute( 'persp|perspShape.focalLength' )
>>> nodeAttr.attributes
(Attribute('focalLength', 17),)
>>> nodeAttr.nodePath
MayaNodePath('persp|perspShape', 0)
>>> nodeAttr.shortName()
NodeAttribute('perspShape.focalLength', 0)
>>>
>>> nodeAttr2 = NodeAttribute( 'persp.translate.tx' )
>>> nodeAttr2.attributes
(Attribute('translate', 6), Attribute('tx', 16))
625990440fa83653e46f61e8
class res_partner_loan(orm.Model): <NEW_LINE> <INDENT> _name = 'res.partner' <NEW_LINE> _inherit = 'res.partner' <NEW_LINE> _columns = { 'followup_ids': fields.one2many('res.partner.followup', 'partner_id', 'Followup'), }
Add an extra relation to the partner object
62599044711fe17d825e1624
class CellsConsoleauthTestCase(ConsoleauthTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(CellsConsoleauthTestCase, self).setUp() <NEW_LINE> self.flags(enable=True, group='cells') <NEW_LINE> self.is_cells = True <NEW_LINE> <DEDENT> def _stub_validate_console_port(self, result): <NEW_LINE> <INDENT> def fake_validate_console_port(ctxt, instance_uuid, console_port, console_type): <NEW_LINE> <INDENT> return result <NEW_LINE> <DEDENT> self.stubs.Set(self.manager.cells_rpcapi, 'validate_console_port', fake_validate_console_port)
Test Case for consoleauth w/ cells enabled.
625990440a366e3fb87ddcf3
class BadCounterName(ActionError): <NEW_LINE> <INDENT> pass
Raised when a counter name is invalid.
6259904410dbd63aa1c71ee7
class Opaque(Compound): <NEW_LINE> <INDENT> fields=('data','otype') <NEW_LINE> __metaclass__=MetaClass <NEW_LINE> def getImage(self): <NEW_LINE> <INDENT> import Image <NEW_LINE> from StringIO import StringIO <NEW_LINE> return Image.open(StringIO(makeData(self.getData()).data().data)) <NEW_LINE> <DEDENT> def fromFile(filename,typestring): <NEW_LINE> <INDENT> import numpy as _N <NEW_LINE> f = open(filename,'rb') <NEW_LINE> try: <NEW_LINE> <INDENT> opq=Opaque(makeData(_N.fromstring(f.read(),dtype="uint8")),typestring) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> f.close() <NEW_LINE> <DEDENT> return opq <NEW_LINE> <DEDENT> fromFile=staticmethod(fromFile)
An Opaque object containing a binary uint8 array and a string identifying the type.
6259904476d4e153a661dbfc
class ScriptExecutionParameter(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'name': {'required': True}, 'type': {'required': True}, } <NEW_LINE> _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, } <NEW_LINE> _subtype_map = { 'type': {'Credential': 'PSCredentialExecutionParameter', 'SecureValue': 'ScriptSecureStringExecutionParameter', 'Value': 'ScriptStringExecutionParameter'} } <NEW_LINE> def __init__( self, *, name: str, **kwargs ): <NEW_LINE> <INDENT> super(ScriptExecutionParameter, self).__init__(**kwargs) <NEW_LINE> self.name = name <NEW_LINE> self.type = None
The arguments passed in to the execution. You probably want to use the sub-classes and not this class directly. Known sub-classes are: PSCredentialExecutionParameter, ScriptSecureStringExecutionParameter, ScriptStringExecutionParameter.

All required parameters must be populated in order to send to Azure.

:param name: Required. The parameter name.
:type name: str
:param type: Required. The type of execution parameter. Constant filled by server. Possible values include: "Value", "SecureValue", "Credential".
:type type: str or ~azure.mgmt.avs.models.ScriptExecutionParameterType
625990441d351010ab8f4e2d
class SimplePostsCache(object): <NEW_LINE> <INDENT> def __init__(self, refresh_rate=_1_MINUTE): <NEW_LINE> <INDENT> self.cache = {cat_id : list() for cat_name, cat_id in categories.items()} <NEW_LINE> self.refresh_rate = refresh_rate <NEW_LINE> self.last_refresh = {cat_id : datetime.now() for cat_name, cat_id in categories.items()} <NEW_LINE> for cat_id in self.last_refresh: <NEW_LINE> <INDENT> self.refresh_cache(cat_id) <NEW_LINE> <DEDENT> <DEDENT> def refresh_cache(self, cat_id): <NEW_LINE> <INDENT> self.cache[cat_id] = most_recent_25_posts_by_category(cat_id) <NEW_LINE> self.last_refresh[cat_id] = datetime.now() <NEW_LINE> print ('Cache refresh at...', str(self.last_refresh[cat_id])) <NEW_LINE> <DEDENT> def get_post(self, category): <NEW_LINE> <INDENT> cat_id = categories[category] <NEW_LINE> if ((datetime.now() - self.last_refresh[cat_id]).seconds > self.refresh_rate): <NEW_LINE> <INDENT> self.refresh_cache(cat_id) <NEW_LINE> <DEDENT> return random.choice(self.cache[cat_id])['post']
Seconds
6259904423849d37ff8523c9
@expand_message_class <NEW_LINE> class CredGetList(AdminHolderMessage): <NEW_LINE> <INDENT> message_type = "credentials-get-list" <NEW_LINE> class Fields: <NEW_LINE> <INDENT> paginate = fields.Nested( Paginate.Schema, required=False, data_key="~paginate", missing=Paginate(limit=10, offset=0), description="Pagination decorator.", ) <NEW_LINE> states = fields.List( fields.Str(required=True), required=False, example=["offer_received"], description="Filter listed credentials by state.", validate=validate.OneOf( [ CredExRecord.STATE_ACKED, CredExRecord.STATE_CREDENTIAL_RECEIVED, CredExRecord.STATE_ISSUED, CredExRecord.STATE_OFFER_RECEIVED, CredExRecord.STATE_OFFER_SENT, CredExRecord.STATE_PROPOSAL_RECEIVED, CredExRecord.STATE_PROPOSAL_SENT, CredExRecord.STATE_REQUEST_RECEIVED, CredExRecord.STATE_REQUEST_SENT, ] ), ) <NEW_LINE> <DEDENT> def __init__( self, paginate: Paginate = None, states: Optional[List[str]] = None, **kwargs ): <NEW_LINE> <INDENT> super().__init__(**kwargs) <NEW_LINE> self.paginate = paginate <NEW_LINE> self.states = states <NEW_LINE> <DEDENT> @log_handling <NEW_LINE> @admin_only <NEW_LINE> async def handle(self, context: RequestContext, responder: BaseResponder): <NEW_LINE> <INDENT> session = await context.session() <NEW_LINE> credentials = await CredExRecord.query( session, post_filter_positive={"role": CredExRecord.ROLE_HOLDER} ) <NEW_LINE> if self.states: <NEW_LINE> <INDENT> credentials = [c for c in credentials if c.state in self.states] <NEW_LINE> <DEDENT> credentials, page = self.paginate.apply(credentials) <NEW_LINE> cred_list = CredList( results=[credential.serialize() for credential in credentials], page=page ) <NEW_LINE> cred_list.assign_thread_from(context.message) <NEW_LINE> await responder.send_reply(cred_list)
Credential list retrieval message.
62599044507cdc57c63a60ab
class SearchDocument(PolyModel): <NEW_LINE> <INDENT> def deleteDocument(self, doc_id): <NEW_LINE> <INDENT> index = search.Index(name=self.index_name) <NEW_LINE> index.delete(doc_id) <NEW_LINE> <DEDENT> def processDocuments(self, documents): <NEW_LINE> <INDENT> entities = [doc.fields for doc in documents] <NEW_LINE> doc_ids = [doc.doc_id for doc in documents] <NEW_LINE> documents_index = 0 <NEW_LINE> result = [] <NEW_LINE> entity_fields = {} <NEW_LINE> for entity in entities: <NEW_LINE> <INDENT> for field in entity: <NEW_LINE> <INDENT> entity_fields[field.name] = field.value <NEW_LINE> <DEDENT> entity_fields['id'] = doc_ids[documents_index] <NEW_LINE> result.append(entity_fields) <NEW_LINE> documents_index = documents_index + 1 <NEW_LINE> entity_fields = {} <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def saveDocument(self, document): <NEW_LINE> <INDENT> index = search.Index(name=self.index_name) <NEW_LINE> index.put(document) <NEW_LINE> <DEDENT> def updateDocument(self, entity, has_changes=has_changes): <NEW_LINE> <INDENT> index = search.Index(name=self.index_name) <NEW_LINE> doc = index.get(entity.key.urlsafe()) <NEW_LINE> if not doc is None and not doc is type(None): <NEW_LINE> <INDENT> if(has_changes(doc.fields, entity)): <NEW_LINE> <INDENT> index.delete(entity.key.urlsafe()) <NEW_LINE> self.createDocument(entity) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> logging.warning("Update document of {} was not possible. The document returned None.", self.index_name)
Search's model.
62599044a8ecb0332587251f
class Authentications(BaseResource): <NEW_LINE> <INDENT> RESOURCE_NAME = 'authentications' <NEW_LINE> def create(self, data): <NEW_LINE> <INDENT> return self._request('POST', self.uri(), data)
The authentications resource. This resource is used to authenticate actions taken by users, such as displaying an authenticated light box.
6259904473bcbd0ca4bcb59b
class TestAddStock(unittest.TestCase): <NEW_LINE> <INDENT> def test_default(self): <NEW_LINE> <INDENT> stocks = [TEA, ] <NEW_LINE> result = add_stock(stocks, POP) <NEW_LINE> self.assertEqual(2, len(result)) <NEW_LINE> self.assertEqual(1, len(stocks)) <NEW_LINE> <DEDENT> def test_args(self): <NEW_LINE> <INDENT> self.assertRaises(TypeError, add_stock, [], {}) <NEW_LINE> <DEDENT> def test_stock_already_exists(self): <NEW_LINE> <INDENT> stocks = [TEA, ] <NEW_LINE> self.assertRaises(ValueError, add_stock, stocks, TEA) <NEW_LINE> <DEDENT> def test_stock_type(self): <NEW_LINE> <INDENT> stocks = [TEA, ] <NEW_LINE> new_stock = copy(TEA) <NEW_LINE> new_stock.symbol = 'FOO' <NEW_LINE> new_stock.stock_type = 'Apples' <NEW_LINE> self.assertRaises(ValueError, add_stock, stocks, new_stock) <NEW_LINE> new_stock.stock_type = 'Common' <NEW_LINE> result = add_stock(stocks, new_stock) <NEW_LINE> self.assertEqual(2, len(result)) <NEW_LINE> self.assertEqual(1, len(stocks)) <NEW_LINE> new_stock.symbol = 'BAR' <NEW_LINE> new_stock.stock_type = 'Preferred' <NEW_LINE> result = add_stock(stocks, new_stock) <NEW_LINE> self.assertEqual(2, len(result)) <NEW_LINE> self.assertEqual(1, len(stocks))
Tests for the `add_stock` function.
62599044d7e4931a7ef3d385
class V1ClientIPConfig(object): <NEW_LINE> <INDENT> swagger_types = { 'timeout_seconds': 'int' } <NEW_LINE> attribute_map = { 'timeout_seconds': 'timeoutSeconds' } <NEW_LINE> def __init__(self, timeout_seconds=None): <NEW_LINE> <INDENT> self._timeout_seconds = None <NEW_LINE> self.discriminator = None <NEW_LINE> if timeout_seconds is not None: <NEW_LINE> <INDENT> self.timeout_seconds = timeout_seconds <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def timeout_seconds(self): <NEW_LINE> <INDENT> return self._timeout_seconds <NEW_LINE> <DEDENT> @timeout_seconds.setter <NEW_LINE> def timeout_seconds(self, timeout_seconds): <NEW_LINE> <INDENT> self._timeout_seconds = timeout_seconds <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in self.swagger_types.items(): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, V1ClientIPConfig): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
625990444e696a045264e7a8
class ZombieApp(): <NEW_LINE> <INDENT> WEAPONS_PATH = "../resources/weapons.json" <NEW_LINE> IN_ENCOUNTERS_PATH = "../resources/in_encounters.json" <NEW_LINE> OUT_ENCOUNTERS_PATH = "../resources/out_encounters.json" <NEW_LINE> FOOD_PATH = "../resources/food.json" <NEW_LINE> GAS_PATH = "../resources/gas.json" <NEW_LINE> MATERIALS_PATH = "../resources/materials.json" <NEW_LINE> AMMO_PATH = "../resources/ammo.json" <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.weapon_gen = ItemGenerator(self.WEAPONS_PATH) <NEW_LINE> self.in_enc_gen = ItemGenerator(self.IN_ENCOUNTERS_PATH) <NEW_LINE> self.out_enc_gen = ItemGenerator(self.OUT_ENCOUNTERS_PATH) <NEW_LINE> self.food_gen = AmountGenerator(self.FOOD_PATH) <NEW_LINE> self.gas_gen = AmountGenerator(self.GAS_PATH) <NEW_LINE> self.mater_gen = AmountGenerator(self.MATERIALS_PATH) <NEW_LINE> self.ammo_gen = ItemAmountGenerator(self.AMMO_PATH) <NEW_LINE> <DEDENT> def generate_weapon(self): <NEW_LINE> <INDENT> weapon = self.weapon_gen.generate() <NEW_LINE> if weapon['ammo'] == 'none': <NEW_LINE> <INDENT> print("You found a {}! It does {} damage!".format(weapon['name'], weapon['damage'])) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("You found a {}! It does {} damage and uses {}!".format(weapon['name'], weapon['damage'], weapon['ammo'])) <NEW_LINE> <DEDENT> <DEDENT> def generate_encounter(self, location): <NEW_LINE> <INDENT> if location == "inside": <NEW_LINE> <INDENT> encounter = self.in_enc_gen.generate() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> encounter = self.out_enc_gen.generate() <NEW_LINE> <DEDENT> print(encounter['title']) <NEW_LINE> print("===================") <NEW_LINE> print(encounter['details']) <NEW_LINE> <DEDENT> def generate_food(self): <NEW_LINE> <INDENT> print(self.food_gen.generate()) <NEW_LINE> <DEDENT> def generate_gas(self): <NEW_LINE> <INDENT> print(self.gas_gen.generate()) <NEW_LINE> <DEDENT> def generate_materials(self): <NEW_LINE> <INDENT> print(self.mater_gen.generate()) <NEW_LINE> <DEDENT> def generate_ammo(self): <NEW_LINE> <INDENT> print(self.ammo_gen.generate())
An object to perform the application logic when requested.
6259904407d97122c4217faf
class LevelAdminForm(forms.ModelForm): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Level <NEW_LINE> <DEDENT> def clean_unlock_condition(self): <NEW_LINE> <INDENT> data = self.cleaned_data["unlock_condition"] <NEW_LINE> utils.validate_form_predicates(data) <NEW_LINE> return data
admin form
62599044a4f1c619b294f80f
class LvMama(object): <NEW_LINE> <INDENT> def __init__(self, hot_base_url, new_base_url): <NEW_LINE> <INDENT> self.hot_base_url = hot_base_url <NEW_LINE> self.new_base_url = new_base_url <NEW_LINE> self.base_urls = [self.hot_base_url, self.new_base_url] <NEW_LINE> self.file = open("/Users/zhangjintao/Desktop/lvmama.txt") <NEW_LINE> <DEDENT> def response(self, url): <NEW_LINE> <INDENT> res = session.get(url) <NEW_LINE> json_data = res.json() <NEW_LINE> html = json_data.get("data") <NEW_LINE> return html <NEW_LINE> <DEDENT> def parse(self, data): <NEW_LINE> <INDENT> print(data, type(data)) <NEW_LINE> html_xpath = etree.HTML(data) <NEW_LINE> element_list = html_xpath.xpath("//li") <NEW_LINE> return element_list <NEW_LINE> <DEDENT> def save(self, data, page_num): <NEW_LINE> <INDENT> for i in data: <NEW_LINE> <INDENT> path = os.getcwd() <NEW_LINE> tags = i.xpath("./div/a/text()") <NEW_LINE> tags.pop(0) <NEW_LINE> text = "{0} {1} {2}".format( i.xpath("./p/span/text()")[0], i.xpath("./p/a/text()")[0], " ".join(tags)) <NEW_LINE> mongo_dict = {"palce": i.xpath( "./p/span/text()")[0], "message": text, "tag": " ".join(tags)} <NEW_LINE> print("mongodb:", mongo_dict) <NEW_LINE> self.file.write(pymongo) <NEW_LINE> print(text) <NEW_LINE> with open(path + os.path.sep + str(page_num), "w", encoding="utf-8") as f: <NEW_LINE> <INDENT> f.write(text) <NEW_LINE> <DEDENT> page_num += 1 <NEW_LINE> <DEDENT> return page_num <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> self.file.close() <NEW_LINE> <DEDENT> def run(self, num, pl): <NEW_LINE> <INDENT> pl = re.sub(r'/', '', pl) <NEW_LINE> pl_path = lm_path + os.path.sep + pl <NEW_LINE> if not os.path.exists(pl_path): <NEW_LINE> <INDENT> os.mkdir(pl_path) <NEW_LINE> os.chdir(pl_path) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> os.chdir(pl_path) <NEW_LINE> <DEDENT> file_num = 0 <NEW_LINE> for base_url in self.base_urls: <NEW_LINE> <INDENT> page_num = 1 <NEW_LINE> while True: <NEW_LINE> <INDENT> time.sleep(1) <NEW_LINE> start_url = base_url.format(num, page_num) <NEW_LINE> res_data = self.response(start_url) <NEW_LINE> if not res_data: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> data = self.parse(res_data) <NEW_LINE> if not data: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> file_num = self.save(data, file_num) <NEW_LINE> page_num += 1
Get all questions and their categories from the LvMama Q&A.
parse: parse the data
response: the object returned by the request
save: store the information
run: start the program
625990448e71fb1e983bcdde
class ContactList(list): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(ContactList, self).__init__(*args, **kwargs) <NEW_LINE> self.__setstate__(None) <NEW_LINE> <DEDENT> @property <NEW_LINE> def core(self): <NEW_LINE> <INDENT> return getattr(self, '_core', lambda: fakeItchat)() or fakeItchat <NEW_LINE> <DEDENT> @core.setter <NEW_LINE> def core(self, value): <NEW_LINE> <INDENT> self._core = ref(value) <NEW_LINE> <DEDENT> def set_default_value(self, initFunction=None, contactClass=None): <NEW_LINE> <INDENT> if hasattr(initFunction, '__call__'): <NEW_LINE> <INDENT> self.contactInitFn = initFunction <NEW_LINE> <DEDENT> if hasattr(contactClass, '__call__'): <NEW_LINE> <INDENT> self.contactClass = contactClass <NEW_LINE> <DEDENT> <DEDENT> def append(self, value): <NEW_LINE> <INDENT> contact = self.contactClass(value) <NEW_LINE> contact.core = self.core <NEW_LINE> if self.contactInitFn is not None: <NEW_LINE> <INDENT> contact = self.contactInitFn(self, contact) or contact <NEW_LINE> <DEDENT> super(ContactList, self).append(contact) <NEW_LINE> <DEDENT> def __deepcopy__(self, memo): <NEW_LINE> <INDENT> r = self.__class__([copy.deepcopy(v) for v in self]) <NEW_LINE> r.contactInitFn = self.contactInitFn <NEW_LINE> r.contactClass = self.contactClass <NEW_LINE> r.core = self.core <NEW_LINE> return r <NEW_LINE> <DEDENT> def __getstate__(self): <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> def __setstate__(self, state): <NEW_LINE> <INDENT> self.contactInitFn = None <NEW_LINE> self.contactClass = User <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '[%s]' % ', '.join([repr(v) for v in self]) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<%s: %s>' % (self.__class__.__name__.split('.')[-1], self.__str__())
When a dict is appended, the init function will be called to format that dict
625990448e05c05ec3f6f7e2
@dataclass <NEW_LINE> class Size(): <NEW_LINE> <INDENT> width: int <NEW_LINE> height: int <NEW_LINE> aspect_ratio: float = field(init=False, repr=False) <NEW_LINE> def __post_init__(self): <NEW_LINE> <INDENT> self.aspect_ratio = float(self.width) / float(self.height)
Holds a size
625990448a43f66fc4bf34a2
class Wings(object): <NEW_LINE> <INDENT> def __init__(self,color,win): <NEW_LINE> <INDENT> self.color = color <NEW_LINE> self.buildWings(win) <NEW_LINE> <DEDENT> def buildWings(self,win): <NEW_LINE> <INDENT> wingP1 = Rectangle(Point(400, 325), Point(50, 350)) <NEW_LINE> wingTip = Arc(Point(75,325),Point(25,350), 90, 170) <NEW_LINE> wingP2 = Polygon(Point(40,350),Point(400,400),Point(400,330),Point(40,330)) <NEW_LINE> wingTip.draw(win) <NEW_LINE> wingTip.setOutline(self.color) <NEW_LINE> wingTip.setFill(self.color) <NEW_LINE> wingP1.draw(win) <NEW_LINE> wingP1.setOutline(self.color) <NEW_LINE> wingP1.setFill(self.color) <NEW_LINE> wingP2.draw(win) <NEW_LINE> wingP2.setOutline(self.color) <NEW_LINE> wingP2.setFill(self.color) <NEW_LINE> wing2p1 = Rectangle(Point(964, 325), Point(600, 350)) <NEW_LINE> wingTip2 = Arc(Point(975, 325), Point(950, 350), 90, -170) <NEW_LINE> wing2P2 = Polygon(Point(960, 350), Point(600, 400), Point(600, 330), Point(960, 330)) <NEW_LINE> wingTip2.draw(win) <NEW_LINE> wing2p1.draw(win) <NEW_LINE> wing2P2.draw(win) <NEW_LINE> wingTip2.setFill(self.color) <NEW_LINE> wingTip2.setOutline(self.color) <NEW_LINE> wing2p1.setFill(self.color) <NEW_LINE> wing2p1.setOutline(self.color) <NEW_LINE> wing2P2.setFill(self.color) <NEW_LINE> wing2P2.setOutline(self.color)
Allows the plane to fly.
62599044711fe17d825e1625
class CollectionDetail(APIView): <NEW_LINE> <INDENT> queryset = Collection.objects.all() <NEW_LINE> permission_classes = (permissions.IsAuthenticated, IsOwner,) <NEW_LINE> def get(self, request, slug): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> user_collection = self.queryset.get( owner=request.user, slug=slug ) <NEW_LINE> <DEDENT> except (Collection.MultipleObjectsReturned, Collection.DoesNotExist): <NEW_LINE> <INDENT> return Response(status=status.HTTP_404_NOT_FOUND) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> serializer = CollectionSerializer(user_collection) <NEW_LINE> return Response(serializer.data) <NEW_LINE> <DEDENT> <DEDENT> def put(self, request, slug): <NEW_LINE> <INDENT> name = request.data.get('name') <NEW_LINE> description = request.data.get('description') <NEW_LINE> try: <NEW_LINE> <INDENT> name = str(name) <NEW_LINE> description = str(description) <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> return serializers.ValidationError( "Server could not understand PUT fields. Expected strings.") <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> collection = self.queryset.get( owner=request.user, slug=slug ) <NEW_LINE> <DEDENT> except Collection.DoesNotExist: <NEW_LINE> <INDENT> return Response(status=status.HTTP_404_NOT_FOUND) <NEW_LINE> <DEDENT> if name: <NEW_LINE> <INDENT> collection.name = name <NEW_LINE> <DEDENT> if description: <NEW_LINE> <INDENT> collection.description = description <NEW_LINE> <DEDENT> collection.save() <NEW_LINE> serializer = CollectionSerializer(collection) <NEW_LINE> return Response(serializer.data) <NEW_LINE> <DEDENT> def delete(self, request, slug): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> user_collection = self.queryset.get( owner=request.user, slug=slug ) <NEW_LINE> <DEDENT> except (Collection.MultipleObjectsReturned, Collection.DoesNotExist): <NEW_LINE> <INDENT> return Response(status=status.HTTP_404_NOT_FOUND) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> user_collection.delete() <NEW_LINE> return Response(status=status.HTTP_204_NO_CONTENT)
View details of one Collection, update or delete it.
6259904463b5f9789fe8647c
class SelectHostVersion(pyblish.api.ContextPlugin): <NEW_LINE> <INDENT> order = pyblish.api.CollectorOrder <NEW_LINE> hosts = ["nuke"] <NEW_LINE> def process(self, context): <NEW_LINE> <INDENT> import pyblish.api <NEW_LINE> context.data["host"] = pyblish.api.current_host()
Inject the host into context
6259904471ff763f4b5e8ab1
class WindowsInfoCollector(InfoCollector): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(WindowsInfoCollector, self).__init__() <NEW_LINE> self._config = infection_monkey.config.WormConfiguration <NEW_LINE> <DEDENT> def get_info(self): <NEW_LINE> <INDENT> logger.debug("Running Windows collector") <NEW_LINE> super(WindowsInfoCollector, self).get_info() <NEW_LINE> from infection_monkey.config import WormConfiguration <NEW_LINE> if MIMIKATZ_COLLECTOR in WormConfiguration.system_info_collector_classes: <NEW_LINE> <INDENT> self.get_mimikatz_info() <NEW_LINE> <DEDENT> return self.info <NEW_LINE> <DEDENT> def get_mimikatz_info(self): <NEW_LINE> <INDENT> logger.info("Gathering mimikatz info") <NEW_LINE> try: <NEW_LINE> <INDENT> credentials = MimikatzCredentialCollector.get_creds() <NEW_LINE> if credentials: <NEW_LINE> <INDENT> self.info["credentials"].update(credentials) <NEW_LINE> logger.info("Mimikatz info gathered successfully") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger.info("No mimikatz info was gathered") <NEW_LINE> <DEDENT> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logger.info(f"Mimikatz credential collector failed: {e}")
System information collecting module for Windows operating systems
6259904494891a1f408ba07e
class Fixture(object): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def populate(): <NEW_LINE> <INDENT> pass
Class that contains a populate method as a static method. It is used by django-swagger-utils as a management command.
6259904496565a6dacd2d912
class Cdf(_Distribution): <NEW_LINE> <INDENT> def __init__(self, spec, randomstream=None): <NEW_LINE> <INDENT> self._x = [] <NEW_LINE> self._cum = [] <NEW_LINE> if randomstream is None: <NEW_LINE> <INDENT> self.randomstream = random <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> assert isinstance(randomstream, random.Random) <NEW_LINE> self.randomstream = randomstream <NEW_LINE> <DEDENT> lastcum = 0 <NEW_LINE> lastx = -inf <NEW_LINE> spec = list(spec) <NEW_LINE> if len(spec) == 0: <NEW_LINE> <INDENT> raise AssertionError('no arguments specified') <NEW_LINE> <DEDENT> if spec[1] != 0: <NEW_LINE> <INDENT> raise AssertionError('first cumulative value should be 0') <NEW_LINE> <DEDENT> while len(spec) > 0: <NEW_LINE> <INDENT> x = spec.pop(0) <NEW_LINE> if len(spec) == 0: <NEW_LINE> <INDENT> raise AssertionError('uneven number of parameters specified') <NEW_LINE> <DEDENT> if x < lastx: <NEW_LINE> <INDENT> raise AssertionError( 'x value {} is smaller than previous value {}'.format(x, lastx)) <NEW_LINE> <DEDENT> cum = spec.pop(0) <NEW_LINE> if cum < lastcum: <NEW_LINE> <INDENT> raise AssertionError('cumulative value {} is smaller than previous value {}' .format(cum, lastcum)) <NEW_LINE> <DEDENT> self._x.append(x) <NEW_LINE> self._cum.append(cum) <NEW_LINE> lastx = x <NEW_LINE> lastcum = cum <NEW_LINE> <DEDENT> if lastcum == 0: <NEW_LINE> <INDENT> raise AssertionError('last cumulative value should be >0') <NEW_LINE> <DEDENT> for i in range(len(self._cum)): <NEW_LINE> <INDENT> self._cum[i] = self._cum[i] / lastcum <NEW_LINE> <DEDENT> self._mean = 0 <NEW_LINE> for i in range(len(self._cum) - 1): <NEW_LINE> <INDENT> self._mean += ((self._x[i] + self._x[i + 1]) / 2) * (self._cum[i + 1] - self._cum[i]) <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'Cdf' <NEW_LINE> <DEDENT> def print_info(self): <NEW_LINE> <INDENT> print('Cdf distribution ' + hex(id(self))) <NEW_LINE> print(' randomstream=' + hex(id(self.randomstream))) <NEW_LINE> <DEDENT> def sample(self): <NEW_LINE> <INDENT> r = self.randomstream.random() <NEW_LINE> for i in range(len(self._cum)): <NEW_LINE> <INDENT> if r < self._cum[i]: <NEW_LINE> <INDENT> return interpolate(r, self._cum[i - 1], self._cum[i], self._x[i - 1], self._x[i]) <NEW_LINE> <DEDENT> <DEDENT> return self._x[i] <NEW_LINE> <DEDENT> def mean(self): <NEW_LINE> <INDENT> return self._mean
Cumulative distribution function

Cdf(spec, seed)

Parameters
----------
spec : list or tuple
    list with x-values and corresponding cumulative density
    (x1,c1,x2,c2, ...xn,cn)
    Requirements:
        x1<=x2<= ...<=xn
        c1<=c2<=cn
        c1=0
        cn>0
    all cumulative densities are auto scaled according to cn,
    so no need to set cn to 1 or 100.
randomstream : randomstream
    if omitted, random will be used
    if used as random.Random(12299) it defines a new stream with the specified seed
62599044b5575c28eb713651
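A minimal standalone sketch (an editor illustration, not a dataset row) of how sampling from such a piecewise-linear cumulative distribution works: draw a uniform random number scaled by the last cumulative value, find the bracketing breakpoints and interpolate linearly between their x-values. The helper name sample and the example spec are assumptions.

import random

def sample(xs, cums):
    # cums is non-decreasing and starts at 0; scale the draw by the last value.
    r = random.random() * cums[-1]
    for i in range(1, len(cums)):
        if r < cums[i]:
            # Linear interpolation between the bracketing breakpoints.
            t = (r - cums[i - 1]) / (cums[i] - cums[i - 1])
            return xs[i - 1] + t * (xs[i] - xs[i - 1])
    return xs[-1]

# Roughly equivalent to Cdf((0, 0, 5, 20, 10, 100)): about 20% of samples
# fall in [0, 5) and about 80% in [5, 10).
print(sample([0, 5, 10], [0, 20, 100]))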
class CreateMonitoredItemsResponse(FrozenClass): <NEW_LINE> <INDENT> ua_types = [ ('TypeId', 'NodeId'), ('ResponseHeader', 'ResponseHeader'), ('Results', 'ListOfMonitoredItemCreateResult'), ('DiagnosticInfos', 'ListOfDiagnosticInfo'), ] <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.TypeId = FourByteNodeId(ObjectIds.CreateMonitoredItemsResponse_Encoding_DefaultBinary) <NEW_LINE> self.ResponseHeader = ResponseHeader() <NEW_LINE> self.Results = [] <NEW_LINE> self.DiagnosticInfos = [] <NEW_LINE> self._freeze = True <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'CreateMonitoredItemsResponse(' + 'TypeId:' + str(self.TypeId) + ', ' + 'ResponseHeader:' + str(self.ResponseHeader) + ', ' + 'Results:' + str(self.Results) + ', ' + 'DiagnosticInfos:' + str(self.DiagnosticInfos) + ')' <NEW_LINE> <DEDENT> __repr__ = __str__
:ivar TypeId:
:vartype TypeId: NodeId
:ivar ResponseHeader:
:vartype ResponseHeader: ResponseHeader
:ivar Results:
:vartype Results: MonitoredItemCreateResult
:ivar DiagnosticInfos:
:vartype DiagnosticInfos: DiagnosticInfo
625990441d351010ab8f4e2f
class TNRMv3ManifestParser(TNRMv3RequestParser): <NEW_LINE> <INDENT> def __init__(self, from_file=None, from_string=None): <NEW_LINE> <INDENT> super(TNRMv3ManifestParser, self).__init__(from_file, from_string) <NEW_LINE> <DEDENT> def get_links(self, rspec): <NEW_LINE> <INDENT> tn_links = [] <NEW_LINE> for l in rspec.findall(".//{%s}link" % (self.none)): <NEW_LINE> <INDENT> manager = l.find("{%s}component_manager" % (self.none)) <NEW_LINE> if manager is None: <NEW_LINE> <INDENT> self.raise_exception("Component-Mgr tag not found in link!") <NEW_LINE> <DEDENT> if not self.check_tn_link_resource(l, manager): <NEW_LINE> <INDENT> logger.info("Skipping this link, not a TN-res: %s", (l,)) <NEW_LINE> continue <NEW_LINE> <DEDENT> tn_link = Link(l.attrib.get("client_id"), manager.attrib.get("name"), l.attrib.get("vlantag")) <NEW_LINE> self.update_protogeni_cm_uuid(l, tn_link) <NEW_LINE> for i in l.iterfind("{%s}interface_ref" % (self.none)): <NEW_LINE> <INDENT> tn_link.add_interface_ref(i.attrib.get("client_id")) <NEW_LINE> <DEDENT> for p in l.iterfind("{%s}property" % (self.none)): <NEW_LINE> <INDENT> tn_link.add_property(p.attrib.get("source_id"), p.attrib.get("dest_id"), p.attrib.get("capacity")) <NEW_LINE> <DEDENT> tn_links.append(tn_link.serialize()) <NEW_LINE> <DEDENT> return tn_links <NEW_LINE> <DEDENT> def links(self): <NEW_LINE> <INDENT> return self.get_links(self.rspec)
Manifest parser inherits from request parser as they use basically the same structure
625990443eb6a72ae038b971
class Coinhsl(AutotoolsPackage): <NEW_LINE> <INDENT> homepage = "http://www.hsl.rl.ac.uk/ipopt/" <NEW_LINE> url = "file://{0}/coinhsl-archive-2014.01.17.tar.gz".format(os.getcwd()) <NEW_LINE> version('2015.06.23', sha256='3e955a2072f669b8f357ae746531b37aea921552e415dc219a5dd13577575fb3') <NEW_LINE> version('2014.01.17', sha256='ed49fea62692c5d2f928d4007988930da9ff9a2e944e4c559d028671d122437b') <NEW_LINE> version('2014.01.10', sha256='7c2be60a3913b406904c66ee83acdbd0709f229b652c4e39ee5d0876f6b2e907', preferred=True) <NEW_LINE> parallel = False <NEW_LINE> variant('blas', default=False, description='Link to external BLAS library') <NEW_LINE> depends_on('blas', when='+blas') <NEW_LINE> def configure_args(self): <NEW_LINE> <INDENT> spec = self.spec <NEW_LINE> args = [] <NEW_LINE> if spec.satisfies('+blas'): <NEW_LINE> <INDENT> args.append('--with-blas={0}'.format(spec['blas'].libs.ld_flags)) <NEW_LINE> <DEDENT> return args
CoinHSL is a collection of linear algebra libraries (KB22, MA27, MA28, MA54, MA57, MA64, MA77, MA86, MA97, MC19, MC34, MC64, MC68, MC69, MC78, MC80, OF01, ZB01, ZB11) bundled for use with IPOPT and other applications that use these HSL routines. Note: CoinHSL is licensed software. You will need to request a license from Research Councils UK and download a .tar.gz archive of CoinHSL yourself. Spack will search your current directory for the download file. Alternatively, add this file to a mirror so that Spack can find it. For instructions on how to set up a mirror, see http://spack.readthedocs.io/en/latest/mirrors.html
6259904407f4c71912bb0742
class VirtualHatButton(AbstractVirtualButton): <NEW_LINE> <INDENT> direction_to_name = { ( 0, 0): "center", ( 0, 1): "north", ( 1, 1): "north-east", ( 1, 0): "east", ( 1, -1): "south-east", ( 0, -1): "south", (-1, -1): "south-west", (-1, 0): "west", (-1, 1): "north-west" } <NEW_LINE> name_to_direction = { "center": (0, 0), "north": (0, 1), "north-east": (1, 1), "east": (1, 0), "south-east": (1, -1), "south": (0, -1), "south-west": (-1, -1), "west": (-1, 0), "north-west": (-1, 1) } <NEW_LINE> def __init__(self, directions=()): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.directions = list(set(directions)) <NEW_LINE> <DEDENT> def from_xml(self, node): <NEW_LINE> <INDENT> for key, value in node.items(): <NEW_LINE> <INDENT> if key in VirtualHatButton.name_to_direction and profile.parse_bool(value): <NEW_LINE> <INDENT> self.directions.append(key) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def to_xml(self): <NEW_LINE> <INDENT> node = ElementTree.Element("virtual-button") <NEW_LINE> for direction in self.directions: <NEW_LINE> <INDENT> if direction in VirtualHatButton.name_to_direction: <NEW_LINE> <INDENT> node.set(direction, "1") <NEW_LINE> <DEDENT> <DEDENT> return node
Virtual button which combines hat directions into a button.
6259904473bcbd0ca4bcb59c
class TransformerDecoder(Module): <NEW_LINE> <INDENT> def __init__(self, decoder_layer, num_layers, norm=None): <NEW_LINE> <INDENT> super(TransformerDecoder, self).__init__() <NEW_LINE> self.layers = _get_clones(decoder_layer, num_layers) <NEW_LINE> self.num_layers = num_layers <NEW_LINE> self.norm = norm <NEW_LINE> <DEDENT> def forward(self, tgt, memory, tgt_mask=None, memory_mask=None, tgt_key_padding_mask=None, memory_key_padding_mask=None): <NEW_LINE> <INDENT> output = tgt <NEW_LINE> for i in range(self.num_layers): <NEW_LINE> <INDENT> output, output_weights, memory_weights = self.layers[i](output, memory, tgt_mask=tgt_mask, memory_mask=memory_mask, tgt_key_padding_mask=tgt_key_padding_mask, memory_key_padding_mask=memory_key_padding_mask) <NEW_LINE> <DEDENT> if self.norm: <NEW_LINE> <INDENT> output = self.norm(output) <NEW_LINE> <DEDENT> return output, output_weights, memory_weights
TransformerDecoder is a stack of N decoder layers Args: decoder_layer: an instance of the TransformerDecoderLayer() class (required). num_layers: the number of sub-decoder-layers in the decoder (required). norm: the layer normalization component (optional). Examples:: >>> decoder_layer = nn.TransformerDecoderLayer(d_model=512, nhead=8) >>> transformer_decoder = nn.TransformerDecoder(decoder_layer, num_layers=6) >>> memory = torch.rand(10, 32, 512) >>> tgt = torch.rand(20, 32, 512) >>> out = transformer_decoder(tgt, memory)
62599044004d5f362081f96e
class SiteRisque(ModelSQL): <NEW_LINE> <INDENT> __name__ = 'site.site-site.risque' <NEW_LINE> _table = 'site_risque_rel' <NEW_LINE> site = fields.Many2One('site.site', 'site', ondelete='CASCADE', required=True) <NEW_LINE> code = fields.Many2One('site.code', 'code', ondelete='CASCADE', required=True)
Site - Risque
625990441f5feb6acb163f05
class MocneAI(AI): <NEW_LINE> <INDENT> def __init__(self, plansza_wlasna, plansza_gracza): <NEW_LINE> <INDENT> super().__init__(plansza_wlasna) <NEW_LINE> self.druga_plansza = deepcopy(plansza_gracza) <NEW_LINE> <DEDENT> def wybierz_konfiguracje_pol(self, cel): <NEW_LINE> <INDENT> pass
AI that uses a simulation of the statistical occurrence of ships on the board for hunting and targeting.
625990448e71fb1e983bcde0
class ClusterWithoutCPException(Exception): <NEW_LINE> <INDENT> pass
Exception to be thrown when creating a cluster without specifying a custom Control plane code
62599044dc8b845886d548ca
class ODE_Capsule(ODE_Object): <NEW_LINE> <INDENT> def __init__(self, geom, ident=None): <NEW_LINE> <INDENT> self.src = vtkContourFilter() <NEW_LINE> ODE_Object.__init__(self, geom, ident) <NEW_LINE> (radius, height) = geom.getParams() <NEW_LINE> cylinder = vtkCylinder() <NEW_LINE> cylinder.SetRadius(radius) <NEW_LINE> vertPlane = vtkPlane() <NEW_LINE> vertPlane.SetOrigin(0, height/2, 0) <NEW_LINE> vertPlane.SetNormal(0, 1, 0) <NEW_LINE> basePlane = vtkPlane() <NEW_LINE> basePlane.SetOrigin(0, -height/2, 0) <NEW_LINE> basePlane.SetNormal(0, -1, 0) <NEW_LINE> sphere_1 = vtkSphere() <NEW_LINE> sphere_1.SetCenter(0, -height/2, 0) <NEW_LINE> sphere_1.SetRadius(radius) <NEW_LINE> sphere_2 = vtkSphere() <NEW_LINE> sphere_2.SetCenter(0, height/2, 0) <NEW_LINE> sphere_2.SetRadius(radius) <NEW_LINE> cylinder_fct = vtkImplicitBoolean() <NEW_LINE> cylinder_fct.SetOperationTypeToIntersection() <NEW_LINE> cylinder_fct.AddFunction(cylinder) <NEW_LINE> cylinder_fct.AddFunction(vertPlane) <NEW_LINE> cylinder_fct.AddFunction(basePlane) <NEW_LINE> capsule = vtkImplicitBoolean() <NEW_LINE> capsule.SetOperationTypeToUnion() <NEW_LINE> capsule.AddFunction(cylinder_fct) <NEW_LINE> capsule.AddFunction(sphere_1) <NEW_LINE> capsule.AddFunction(sphere_2) <NEW_LINE> capsule_fct = vtkSampleFunction() <NEW_LINE> capsule_fct.SetImplicitFunction(capsule) <NEW_LINE> capsule_fct.ComputeNormalsOff() <NEW_LINE> capsule_fct.SetModelBounds(-height-radius, height+radius, -height-radius, height+radius, -height-radius, height+radius) <NEW_LINE> self.src.SetInputConnection(capsule_fct.GetOutputPort()) <NEW_LINE> self.src.SetValue(0, 0.0) <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> if self.isEnabled(): <NEW_LINE> <INDENT> ODE_Object.update(self) <NEW_LINE> self.RotateX(90)
VTK visualization of class ode.GeomCapsule
625990448da39b475be04501
class TestComponent(unittest2.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.lambda_, self.mu = symbols('l, m', constant=True, positive=True, null=False ) <NEW_LINE> self.t = symbols('t', positive=True) <NEW_LINE> self.component = Component('C', self.lambda_, self.mu) <NEW_LINE> <DEDENT> def test_reliability(self): <NEW_LINE> <INDENT> self.assertEqual(exp(-self.lambda_ * self.t), self.component.reliability(self.t)) <NEW_LINE> <DEDENT> def test_maintainability(self): <NEW_LINE> <INDENT> self.assertEqual(1 - exp(-self.mu * self.t), self.component.maintainability(self.t)) <NEW_LINE> <DEDENT> def test_availability(self): <NEW_LINE> <INDENT> availability = self.lambda_ * exp(self.t*(-self.lambda_ - self.mu)) / (self.lambda_ + self.mu) + self.mu/(self.lambda_ + self.mu) <NEW_LINE> self.assertEqual(availability, self.component.availability(self.t))
Test the Component class.
6259904407f4c71912bb0743
class Meta: <NEW_LINE> <INDENT> verbose_name = _(u"Condición de Ingreso") <NEW_LINE> verbose_name_plural = _(u"Condiciones de Ingreso") <NEW_LINE> ordering = ["descripcion"]
@note: Superclass that configures the parameters of the Condicion_Ingreso class @licence: GPLv2 @author: T.S.U. Roldan D. Vargas G. @contact: roldandvg at gmail.com
62599044462c4b4f79dbcd0f
class G_LogChildNode(G_SessionChildNode): <NEW_LINE> <INDENT> def GetLogNode(self): <NEW_LINE> <INDENT> return self.GetParentNode(G_Project.NodeID_Log) <NEW_LINE> <DEDENT> def GetLogfile(self): <NEW_LINE> <INDENT> return self.GetLogNode().GetLogfile() <NEW_LINE> <DEDENT> def GetLogNodeChildNode(self, factory_id): <NEW_LINE> <INDENT> return self.GetLogNode().FindChildNode(factory_id = factory_id, recursive = True) <NEW_LINE> <DEDENT> def GetDisplayNode(self): <NEW_LINE> <INDENT> return None
Mixin class to extend child nodes of a logfile node with common behaviour
62599044e76e3b2f99fd9d1d
class CommitteeFeedbackDetailPermission(permissions.BasePermission): <NEW_LINE> <INDENT> def has_permission(self, request, view): <NEW_LINE> <INDENT> user = request.user <NEW_LINE> if user.is_superuser: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> method = request.method <NEW_LINE> committee_id = request.data.get('committee', -1) <NEW_LINE> feedback_id = view.kwargs.get('pk', -1) <NEW_LINE> if (method == 'POST' and user.is_authenticated() and user.is_delegate() and user.delegate.assignment and (not user.delegate.committee_feedback_submitted)): <NEW_LINE> <INDENT> return int(user.delegate.assignment.committee.id) == int( committee_id) <NEW_LINE> <DEDENT> if (method == 'GET' and user.is_authenticated() and user.is_chair() and user.committee): <NEW_LINE> <INDENT> query = CommitteeFeedback.objects.get(id=feedback_id) <NEW_LINE> if query: <NEW_LINE> <INDENT> return user.committee.id == query.committee.id <NEW_LINE> <DEDENT> <DEDENT> return False
Accept POST requests only from a delegate of the committee. Accept GET requests only from the chair of the committee.
6259904426238365f5fade6c
class _TestDataObject(object): <NEW_LINE> <INDENT> def __init__(self, property_1, property_2, property_3): <NEW_LINE> <INDENT> self._property_1 = property_1 <NEW_LINE> self._property_2 = property_2 <NEW_LINE> self._property_3 = property_3 <NEW_LINE> <DEDENT> @property <NEW_LINE> def property_1(self): <NEW_LINE> <INDENT> return self._property_1 <NEW_LINE> <DEDENT> @property <NEW_LINE> def property_2(self): <NEW_LINE> <INDENT> return self._property_2 <NEW_LINE> <DEDENT> @property <NEW_LINE> def property_3(self): <NEW_LINE> <INDENT> return self._property_3
Class used for generating test data objects.
6259904471ff763f4b5e8ab3
class Ingesta(models.Model, Turno): <NEW_LINE> <INDENT> admision = models.ForeignKey(Admision, related_name='ingestas') <NEW_LINE> fecha_y_hora = models.DateTimeField(default=timezone.now) <NEW_LINE> ingerido = models.CharField(max_length=200, blank=True) <NEW_LINE> cantidad = models.IntegerField() <NEW_LINE> liquido = models.NullBooleanField(blank=True, null=True) <NEW_LINE> via = models.CharField(max_length=200, blank=True, null=True) <NEW_LINE> usuario = models.ForeignKey(User, blank=True, null=True, related_name='ingestas') <NEW_LINE> def get_absolute_url(self): <NEW_LINE> <INDENT> return reverse('enfermeria-ingestas-excretas', args=[self.admision.id])
Records the intakes of a :class:`Persona`.
6259904450485f2cf55dc298
class Computing(models.Model): <NEW_LINE> <INDENT> author = models.ForeignKey('auth.User', null=True, on_delete=models.DO_NOTHING) <NEW_LINE> edited_by = models.CharField(max_length=200, null=True) <NEW_LINE> name = models.CharField(max_length=200, blank=True, help_text="Name of the computer.") <NEW_LINE> type_object = models.ForeignKey('Type_Object', help_text="Type of the computing.", on_delete=models.DO_NOTHING) <NEW_LINE> location = models.ForeignKey('Location', help_text="Where it is the computer.", on_delete=models.DO_NOTHING) <NEW_LINE> user_name = models.ForeignKey('Full_Name_Users', blank=True, help_text="Username of this computer.", on_delete=models.DO_NOTHING) <NEW_LINE> model = models.CharField(max_length=200, blank=True, help_text="Model of the computer.") <NEW_LINE> processor = models.CharField(max_length=200, blank=True, help_text="Processor of the computer.") <NEW_LINE> memory = models.CharField(max_length=200, blank=True, help_text="How much memory have the computer.") <NEW_LINE> screen_1 = models.CharField(max_length=200, blank=True, help_text="Model of the screen of the computer.") <NEW_LINE> screen_2 = models.CharField(max_length=200, blank=True, help_text="Model of the second screen " "of the computer if this have a second screen.") <NEW_LINE> keyboard = models.CharField(max_length=200, blank=True, help_text="keyboard of the computer.") <NEW_LINE> mouse = models.CharField(max_length=200, blank=True, help_text="Mouse of the computer.") <NEW_LINE> others = models.CharField(max_length=200, blank=True, help_text="Others characteristics of the computer.") <NEW_LINE> setup = models.ForeignKey('Setup', blank=True, help_text="In order to the computer is used.", on_delete=models.DO_NOTHING) <NEW_LINE> created_date = models.DateTimeField(default=timezone.now, help_text="Date when was created.") <NEW_LINE> def create(self): <NEW_LINE> <INDENT> self.created_date = timezone.now() <NEW_LINE> self.save() <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name
Computing model. This model stores the computers.
62599044d99f1b3c44d069b0
class IBeforeRender(IDict): <NEW_LINE> <INDENT> rendering_val = Attribute('The value returned by a view or passed to a ' '``render`` method for this rendering. ' 'This feature is new in Pyramid 1.2.')
Subscribers to this event may introspect and modify the set of :term:`renderer globals` before they are passed to a :term:`renderer`. The event object itself provides a dictionary-like interface for adding and removing :term:`renderer globals`. The keys and values of the dictionary are those globals. For example:: from repoze.events import subscriber from pyramid.interfaces import IBeforeRender @subscriber(IBeforeRender) def add_global(event): event['mykey'] = 'foo' See also :ref:`beforerender_event`.
6259904430c21e258be99b19
class Telefone(BaseModel): <NEW_LINE> <INDENT> numero = models.CharField(max_length=50) <NEW_LINE> ddd = models.CharField(max_length=5, choices=DDD_BRASIL) <NEW_LINE> tipo = models.CharField(max_length=35, choices=TIPO_TELEFONE) <NEW_LINE> operadora = models.CharField(max_length=50, choices=OPERADORAS) <NEW_LINE> whatsapp = models.BooleanField(default=False)
Telephone data of the client
62599044d99f1b3c44d069b1
class PostsRSS(Feed): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self.tag = kwargs.pop("tag", None) <NEW_LINE> self.category = kwargs.pop("category", None) <NEW_LINE> self.username = kwargs.pop("username", None) <NEW_LINE> super(PostsRSS, self).__init__(*args, **kwargs) <NEW_LINE> self._public = True <NEW_LINE> try: <NEW_LINE> <INDENT> page = Page.objects.published().get(slug=settings.BLOG_SLUG) <NEW_LINE> <DEDENT> except Page.DoesNotExist: <NEW_LINE> <INDENT> page = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._public = not page.login_required <NEW_LINE> <DEDENT> if self._public: <NEW_LINE> <INDENT> if page is not None: <NEW_LINE> <INDENT> self.title = page.title <NEW_LINE> self.description = strip_tags(page.description) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> settings.use_editable() <NEW_LINE> self.title = settings.SITE_TITLE <NEW_LINE> self.description = settings.SITE_TAGLINE <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def get_feed(self, *args, **kwargs): <NEW_LINE> <INDENT> if VERSION < (1, 4) and args[0]: <NEW_LINE> <INDENT> attr, value = args[0].split("/", 1) <NEW_LINE> setattr(self, attr, value) <NEW_LINE> <DEDENT> return super(PostsRSS, self).get_feed(*args, **kwargs) <NEW_LINE> <DEDENT> def link(self): <NEW_LINE> <INDENT> return reverse("blog_post_feed", kwargs={"format": "rss"}) <NEW_LINE> <DEDENT> def items(self): <NEW_LINE> <INDENT> if not self._public: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> blog_posts = BlogPost.objects.published().select_related("user") <NEW_LINE> if self.tag: <NEW_LINE> <INDENT> tag = get_object_or_404(Keyword, slug=self.tag) <NEW_LINE> blog_posts = blog_posts.filter(keywords__in=tag.assignments.all()) <NEW_LINE> <DEDENT> if self.category: <NEW_LINE> <INDENT> category = get_object_or_404(BlogCategory, slug=self.category) <NEW_LINE> blog_posts = blog_posts.filter(categories=category) <NEW_LINE> <DEDENT> if self.username: <NEW_LINE> <INDENT> author = get_object_or_404(User, username=self.username) <NEW_LINE> blog_posts = blog_posts.filter(user=author) <NEW_LINE> <DEDENT> return blog_posts <NEW_LINE> <DEDENT> def item_description(self, item): <NEW_LINE> <INDENT> return item.content <NEW_LINE> <DEDENT> def categories(self): <NEW_LINE> <INDENT> if not self._public: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> return BlogCategory.objects.all() <NEW_LINE> <DEDENT> def item_author_name(self, item): <NEW_LINE> <INDENT> return item.user.get_full_name() or item.user.username <NEW_LINE> <DEDENT> def item_author_link(self, item): <NEW_LINE> <INDENT> username = item.user.username <NEW_LINE> return reverse("blog_post_list_author", kwargs={"username": username}) <NEW_LINE> <DEDENT> def item_pubdate(self, item): <NEW_LINE> <INDENT> return item.publish_date <NEW_LINE> <DEDENT> def item_categories(self, item): <NEW_LINE> <INDENT> return item.categories.all()
RSS feed for all blog posts.
62599044baa26c4b54d505bb
class BlacklistToken(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'blacklist_tokens' <NEW_LINE> id = db.Column(db.Integer, primary_key=True, autoincrement=True) <NEW_LINE> token = db.Column(db.String(500), unique=True, nullable=False) <NEW_LINE> blacklisted_on = db.Column(db.DateTime, nullable=False) <NEW_LINE> def __init__(self, token): <NEW_LINE> <INDENT> self.token = token <NEW_LINE> self.blacklisted_on = datetime.datetime.utcnow() <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<token: '{}'>".format(self.token) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def check_blacklist(auth_token): <NEW_LINE> <INDENT> res = BlacklistToken.query.filter_by(token=str(auth_token)).first() <NEW_LINE> if res: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False
Token Model for storing JWT tokens
6259904496565a6dacd2d913
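A minimal usage sketch for the BlacklistToken model above, assuming a Flask-SQLAlchemy `db` session and an illustrative `app.models` import path:

>>> from app.models import BlacklistToken, db       # hypothetical import path
>>> auth_token = "some.jwt.token"                    # placeholder JWT string
>>> db.session.add(BlacklistToken(token=auth_token))
>>> db.session.commit()
>>> BlacklistToken.check_blacklist(auth_token)
True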
class Error(RpcResponse): <NEW_LINE> <INDENT> def __init__(self, text, **kwargs): <NEW_LINE> <INDENT> super(Error, self).__init__(error=text, **kwargs)
Simple responses. Just for pretty code and some kind of "protocol". Example:: return Error('Something happened', code=error_code, traceback=traceback)
62599044d4950a0f3b1117ca
class StorageDiskTask(CliTask): <NEW_LINE> <INDENT> def process(self): <NEW_LINE> <INDENT> self.manual = Help() <NEW_LINE> if self.__help(): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.load_config() <NEW_LINE> self.account = AzureAccount(self.config) <NEW_LINE> container_name = self.account.storage_container() <NEW_LINE> self.storage = Storage( self.account, container_name ) <NEW_LINE> if self.command_args['--start-datetime'] == 'now': <NEW_LINE> <INDENT> start = datetime.datetime.utcnow() - datetime.timedelta(minutes=1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> start = self.validate_date('--start-datetime') <NEW_LINE> <DEDENT> if self.command_args['--expiry-datetime'] == '30 days from start': <NEW_LINE> <INDENT> expiry = start + datetime.timedelta(days=30) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> expiry = self.validate_date('--expiry-datetime') <NEW_LINE> <DEDENT> self.validate_sas_permissions('--permissions') <NEW_LINE> if self.command_args['upload']: <NEW_LINE> <INDENT> self.__upload() <NEW_LINE> <DEDENT> elif self.command_args['delete']: <NEW_LINE> <INDENT> self.__delete() <NEW_LINE> <DEDENT> elif self.command_args['sas']: <NEW_LINE> <INDENT> self.__sas( container_name, start, expiry, self.command_args['--permissions'] ) <NEW_LINE> <DEDENT> <DEDENT> def __help(self): <NEW_LINE> <INDENT> if self.command_args['help']: <NEW_LINE> <INDENT> self.manual.show('azurectl::storage::disk') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.manual <NEW_LINE> <DEDENT> def __upload(self): <NEW_LINE> <INDENT> if self.command_args['--quiet']: <NEW_LINE> <INDENT> self.__upload_no_progress() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.__upload_with_progress() <NEW_LINE> <DEDENT> <DEDENT> def __upload_no_progress(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.__process_upload() <NEW_LINE> <DEDENT> except (KeyboardInterrupt): <NEW_LINE> <INDENT> raise SystemExit('azurectl aborted by keyboard interrupt') <NEW_LINE> <DEDENT> <DEDENT> def __upload_with_progress(self): <NEW_LINE> <INDENT> image = self.command_args['--source'] <NEW_LINE> progress = BackgroundScheduler(timezone=utc) <NEW_LINE> progress.add_job( self.storage.print_upload_status, 'interval', seconds=3 ) <NEW_LINE> progress.start() <NEW_LINE> try: <NEW_LINE> <INDENT> self.__process_upload() <NEW_LINE> self.storage.print_upload_status() <NEW_LINE> progress.shutdown() <NEW_LINE> <DEDENT> except (KeyboardInterrupt): <NEW_LINE> <INDENT> progress.shutdown() <NEW_LINE> raise SystemExit('azurectl aborted by keyboard interrupt') <NEW_LINE> <DEDENT> print() <NEW_LINE> log.info('Uploaded %s', image) <NEW_LINE> <DEDENT> def __process_upload(self): <NEW_LINE> <INDENT> self.storage.upload( self.command_args['--source'], self.command_args['--blob-name'], self.command_args['--max-chunk-size'] ) <NEW_LINE> <DEDENT> def __sas(self, container_name, start, expiry, permissions): <NEW_LINE> <INDENT> result = DataCollector() <NEW_LINE> out = DataOutput( result, self.global_args['--output-format'], self.global_args['--output-style'] ) <NEW_LINE> result.add( self.command_args['--blob-name'] + ':sas_url', self.storage.disk_image_sas( container_name, self.command_args['--blob-name'], start, expiry, permissions ) ) <NEW_LINE> out.display() <NEW_LINE> <DEDENT> def __delete(self): <NEW_LINE> <INDENT> image = self.command_args['--blob-name'] <NEW_LINE> self.storage.delete(image) <NEW_LINE> log.info('Deleted %s', image)
Process disk commands
6259904476d4e153a661dbfe
class AdminAuthenticationForm(AuthenticationForm): <NEW_LINE> <INDENT> this_is_the_login_form = forms.BooleanField( widget=forms.HiddenInput, initial=1, error_messages={'required': ugettext_lazy("Please log in again, because your session has expired.")}) <NEW_LINE> def clean(self): <NEW_LINE> <INDENT> from xadmin.util import User <NEW_LINE> ERROR_MESSAGE = ugettext_lazy("Please enter the correct username and password " "for a staff account. Note that both fields are case-sensitive.") <NEW_LINE> username = self.cleaned_data.get('username') <NEW_LINE> password = self.cleaned_data.get('password') <NEW_LINE> message = ERROR_MESSAGE <NEW_LINE> if username and password: <NEW_LINE> <INDENT> self.user_cache = authenticate( username=username, password=password) <NEW_LINE> if self.user_cache is None: <NEW_LINE> <INDENT> if u'@' in username: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> user = User.objects.get(email=username) <NEW_LINE> <DEDENT> except (User.DoesNotExist, User.MultipleObjectsReturned): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if user.check_password(password): <NEW_LINE> <INDENT> message = _("Your e-mail address is not your username." " Try '%s' instead.") % user.username <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> raise forms.ValidationError(message) <NEW_LINE> <DEDENT> elif not self.user_cache.is_active or not self.user_cache.is_staff: <NEW_LINE> <INDENT> raise forms.ValidationError(message) <NEW_LINE> <DEDENT> <DEDENT> if hasattr(self, 'check_for_test_cookie'): <NEW_LINE> <INDENT> self.check_for_test_cookie() <NEW_LINE> <DEDENT> return self.cleaned_data
A custom authentication form used in the admin app.
6259904426068e7796d4dc58
class LTSD(): <NEW_LINE> <INDENT> def __init__(self,winsize,window,order): <NEW_LINE> <INDENT> self.winsize = int(winsize) <NEW_LINE> self.window = window <NEW_LINE> self.order = order <NEW_LINE> self.amplitude = {} <NEW_LINE> <DEDENT> def get_amplitude(self,signal,l): <NEW_LINE> <INDENT> if l in self.amplitude: <NEW_LINE> <INDENT> return self.amplitude[l] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> amp = sp.absolute(sp.fft(get_frame(signal, self.winsize,l) * self.window)) <NEW_LINE> self.amplitude[l] = amp <NEW_LINE> return amp <NEW_LINE> <DEDENT> <DEDENT> def compute_noise_avg_spectrum(self, nsignal): <NEW_LINE> <INDENT> windownum = int(len(nsignal)//(self.winsize//2) - 1) <NEW_LINE> avgamp = np.zeros(self.winsize) <NEW_LINE> for l in range(windownum): <NEW_LINE> <INDENT> avgamp += sp.absolute(sp.fft(get_frame(nsignal, self.winsize,l) * self.window)) <NEW_LINE> <DEDENT> return avgamp/float(windownum) <NEW_LINE> <DEDENT> def compute(self,signal): <NEW_LINE> <INDENT> self.windownum = int(len(signal)//(self.winsize//2) - 1) <NEW_LINE> ltsds = np.zeros(self.windownum) <NEW_LINE> self.avgnoise = self.compute_noise_avg_spectrum(signal[0:self.winsize*20])**2 <NEW_LINE> for l in range(self.windownum): <NEW_LINE> <INDENT> ltsds[l] = self.ltsd(signal,l,5) <NEW_LINE> <DEDENT> return ltsds <NEW_LINE> <DEDENT> def ltse(self,signal,l,order): <NEW_LINE> <INDENT> maxamp = np.zeros(self.winsize) <NEW_LINE> for idx in range(l-order,l+order+1): <NEW_LINE> <INDENT> amp = self.get_amplitude(signal,idx) <NEW_LINE> maxamp = np.maximum(maxamp,amp) <NEW_LINE> <DEDENT> return maxamp <NEW_LINE> <DEDENT> def ltsd(self,signal,l,order): <NEW_LINE> <INDENT> if l < order or l+order >= self.windownum: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> return 10.0 * np.log10(np.sum(self.ltse(signal,l,order)**2/self.avgnoise)/float(len(self.avgnoise)))
LTSD VAD code from jfsantos
6259904423849d37ff8523cd
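A hedged usage sketch for the LTSD detector above; the frame size, window, and threshold are illustrative, and the snippet assumes the legacy SciPy where `sp.fft` is callable, as the class itself does:

>>> import numpy as np
>>> winsize = 512
>>> detector = LTSD(winsize, np.hanning(winsize), order=5)
>>> signal = np.random.randn(16000)        # stand-in for mono PCM samples
>>> ltsds = detector.compute(signal)       # one long-term spectral divergence value per frame
>>> speech_frames = ltsds > 30.0           # VAD decision; the threshold is application-dependent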
class V1beta1PodDisruptionBudgetSpec(object): <NEW_LINE> <INDENT> def __init__(self, min_available=None, selector=None): <NEW_LINE> <INDENT> self.swagger_types = { 'min_available': 'IntstrIntOrString', 'selector': 'V1LabelSelector' } <NEW_LINE> self.attribute_map = { 'min_available': 'minAvailable', 'selector': 'selector' } <NEW_LINE> self._min_available = min_available <NEW_LINE> self._selector = selector <NEW_LINE> <DEDENT> @property <NEW_LINE> def min_available(self): <NEW_LINE> <INDENT> return self._min_available <NEW_LINE> <DEDENT> @min_available.setter <NEW_LINE> def min_available(self, min_available): <NEW_LINE> <INDENT> self._min_available = min_available <NEW_LINE> <DEDENT> @property <NEW_LINE> def selector(self): <NEW_LINE> <INDENT> return self._selector <NEW_LINE> <DEDENT> @selector.setter <NEW_LINE> def selector(self, selector): <NEW_LINE> <INDENT> self._selector = selector <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
62599044a79ad1619776b392
class JoinToProjectingJoin(Rule): <NEW_LINE> <INDENT> def fire(self, expr): <NEW_LINE> <INDENT> if not isinstance(expr, algebra.Join) or isinstance(expr, algebra.ProjectingJoin): <NEW_LINE> <INDENT> return expr <NEW_LINE> <DEDENT> return algebra.ProjectingJoin(expr.condition, expr.left, expr.right, expr.scheme().ascolumnlist()) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "Join => ProjectingJoin"
A rewrite rule for turning every Join into a ProjectingJoin
6259904491af0d3eaad3b137
class S3MainMenu(default.S3MainMenu): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def menu_modules(cls): <NEW_LINE> <INDENT> if not current.auth.is_logged_in(): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> settings = current.deployment_settings <NEW_LINE> if settings.get_event_label(): <NEW_LINE> <INDENT> EVENTS = "Disasters" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> EVENTS = "Events" <NEW_LINE> <DEDENT> if settings.get_incident_label(): <NEW_LINE> <INDENT> INCIDENTS = "Tickets" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> INCIDENTS = "Incidents" <NEW_LINE> <DEDENT> menu= [MM("Call Logs", c="event", f="incident_report"), MM(INCIDENTS, c="event", f="incident", m="summary"), MM("Scenarios", c="event", f="scenario"), MM("Map", c="gis", f="index"), MM("Current System", link=False)( MM(EVENTS, c="event", f="event"), MM("Disaster Assessments", c="dc", f="index"), MM("Human Resources", c="hrm", f="staff"), MM("Infrastructure", c="transport", f="index"), MM("Population", c="stats", f="demographic_data", m="summary"), MM("Item Management", c="asset", f="index"), ), MM("DRR", link=False)( MM("Projects", c="project", f="project", m="summary"), MM("Trainings", c="hrm", f="training_event", m="summary"), ), MM("Other", link=False)( MM("Cases", c="br", f="person", vars={"closed": 0}), MM("Disease", c="disease", f="disease"), MM("Shelters", c="cr", f="shelter"), ), ] <NEW_LINE> return menu
Custom Application Main Menu
6259904450485f2cf55dc299
class LogCaptureFixture: <NEW_LINE> <INDENT> def __init__(self, item: nodes.Node) -> None: <NEW_LINE> <INDENT> self._item = item <NEW_LINE> self._initial_handler_level = None <NEW_LINE> self._initial_logger_levels = {} <NEW_LINE> <DEDENT> def _finalize(self) -> None: <NEW_LINE> <INDENT> if self._initial_handler_level is not None: <NEW_LINE> <INDENT> self.handler.setLevel(self._initial_handler_level) <NEW_LINE> <DEDENT> for logger_name, level in self._initial_logger_levels.items(): <NEW_LINE> <INDENT> logger = logging.getLogger(logger_name) <NEW_LINE> logger.setLevel(level) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def handler(self) -> LogCaptureHandler: <NEW_LINE> <INDENT> return self._item._store[caplog_handler_key] <NEW_LINE> <DEDENT> def get_records(self, when: str) -> List[logging.LogRecord]: <NEW_LINE> <INDENT> return self._item._store[caplog_records_key].get(when, []) <NEW_LINE> <DEDENT> @property <NEW_LINE> def text(self) -> str: <NEW_LINE> <INDENT> return _remove_ansi_escape_sequences(self.handler.stream.getvalue()) <NEW_LINE> <DEDENT> @property <NEW_LINE> def records(self) -> List[logging.LogRecord]: <NEW_LINE> <INDENT> return self.handler.records <NEW_LINE> <DEDENT> @property <NEW_LINE> def record_tuples(self) -> List[Tuple[str, int, str]]: <NEW_LINE> <INDENT> return [(r.name, r.levelno, r.getMessage()) for r in self.records] <NEW_LINE> <DEDENT> @property <NEW_LINE> def messages(self) -> List[str]: <NEW_LINE> <INDENT> return [r.getMessage() for r in self.records] <NEW_LINE> <DEDENT> def clear(self) -> None: <NEW_LINE> <INDENT> self.handler.reset() <NEW_LINE> <DEDENT> def set_level(self, level: Union[int, str], logger: Optional[str] = None) -> None: <NEW_LINE> <INDENT> logger_obj = logging.getLogger(logger) <NEW_LINE> self._initial_logger_levels.setdefault(logger, logger_obj.level) <NEW_LINE> logger_obj.setLevel(level) <NEW_LINE> if self._initial_handler_level is None: <NEW_LINE> <INDENT> self._initial_handler_level = self.handler.level <NEW_LINE> <DEDENT> self.handler.setLevel(level) <NEW_LINE> <DEDENT> @contextmanager <NEW_LINE> def at_level( self, level: int, logger: Optional[str] = None ) -> Generator[None, None, None]: <NEW_LINE> <INDENT> logger_obj = logging.getLogger(logger) <NEW_LINE> orig_level = logger_obj.level <NEW_LINE> logger_obj.setLevel(level) <NEW_LINE> handler_orig_level = self.handler.level <NEW_LINE> self.handler.setLevel(level) <NEW_LINE> try: <NEW_LINE> <INDENT> yield <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> logger_obj.setLevel(orig_level) <NEW_LINE> self.handler.setLevel(handler_orig_level)
Provides access and control of log capturing.
625990443eb6a72ae038b974
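This class matches pytest's `caplog` fixture; a short sketch of how a test typically drives it (logger name and messages are illustrative):

import logging

def test_emits_warning(caplog):
    caplog.set_level(logging.WARNING, logger="myapp")    # original level restored after the test
    logging.getLogger("myapp").warning("disk almost full")
    assert ("myapp", logging.WARNING, "disk almost full") in caplog.record_tuples
    with caplog.at_level(logging.DEBUG, logger="myapp"):
        logging.getLogger("myapp").debug("now captured too")
    assert "now captured too" in caplog.messages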
class DeleteDocument(DeleteView): <NEW_LINE> <INDENT> def get_object(self): <NEW_LINE> <INDENT> return get_object_or_404(Document, pk = self.kwargs['doc_id']) <NEW_LINE> <DEDENT> def get_success_url(self): <NEW_LINE> <INDENT> current_space = self.kwargs['space_name'] <NEW_LINE> return '/spaces/{0}'.format(current_space) <NEW_LINE> <DEDENT> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super(DeleteDocument, self).get_context_data(**kwargs) <NEW_LINE> context['get_place'] = get_object_or_404(Space, url=self.kwargs['space_name']) <NEW_LINE> return context
Returns a confirmation page before deleting the current document. :rtype: Confirmation :context: get_place
625990441f5feb6acb163f07
@attr.s <NEW_LINE> class HashSumValidator(FileValidator): <NEW_LINE> <INDENT> hash_sum: Union[str, bytes, bytearray] = attr.ib( validator=attr.validators.instance_of((str, bytes, bytearray)), converter=lambda x: bytes.fromhex(x) if isinstance(x, str) else x, default=None ) <NEW_LINE> hash_type: HashType = attr.ib( validator=attr.validators.in_(HashType), default=HashType.MD5, converter=lambda x: HashType.MD5 if x is None else HashType(x) ) <NEW_LINE> def validate(self, path: Path): <NEW_LINE> <INDENT> super().validate(path) <NEW_LINE> hash_obj = utils.calc_hash(path, self.hash_type) <NEW_LINE> if not compare_digest(hash_obj.digest(), self.hash_sum): <NEW_LINE> <INDENT> raise ValidationError( f"Hash sum of file '{path}': '{hash_obj.hexdigest()}' " f"mismatches the provided one '{self.hash_sum.hex()}'")
Validator of hash-sum for the provided file and expected hash-sum for this file. Attributes ---------- hash_sum: Union[str, bytes, bytearray] Hexadecimal string or byte-array object with expected hash-sum value of validated file. hash_type: HashType Type of hash sum. See `Hashtype` for more information
6259904407f4c71912bb0745
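A hedged usage sketch for the validator above; the file name and expected MD5 digest are placeholders, and it assumes the FileValidator base adds no required constructor arguments:

>>> from pathlib import Path
>>> validator = HashSumValidator(
...     hash_sum="d41d8cd98f00b204e9800998ecf8427e",   # hex string, converted to bytes by the attrs converter
...     hash_type=HashType.MD5)
>>> validator.validate(Path("dump.bin"))               # raises ValidationError if the digest differs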
class Host(object): <NEW_LINE> <INDENT> def __init__(self, session): <NEW_LINE> <INDENT> self._session = session <NEW_LINE> <DEDENT> def host_power_action(self, host, action): <NEW_LINE> <INDENT> host_mor = vm_util.get_host_ref(self._session) <NEW_LINE> LOG.debug(_("%(action)s %(host)s"), {'action': action, 'host': host}) <NEW_LINE> if action == "reboot": <NEW_LINE> <INDENT> host_task = self._session._call_method( self._session._get_vim(), "RebootHost_Task", host_mor, force=False) <NEW_LINE> <DEDENT> elif action == "shutdown": <NEW_LINE> <INDENT> host_task = self._session._call_method( self._session._get_vim(), "ShutdownHost_Task", host_mor, force=False) <NEW_LINE> <DEDENT> elif action == "startup": <NEW_LINE> <INDENT> host_task = self._session._call_method( self._session._get_vim(), "PowerUpHostFromStandBy_Task", host_mor, timeoutSec=60) <NEW_LINE> <DEDENT> self._session._wait_for_task(host, host_task) <NEW_LINE> <DEDENT> def host_maintenance_mode(self, host, mode): <NEW_LINE> <INDENT> host_mor = vm_util.get_host_ref(self._session) <NEW_LINE> LOG.debug(_("Set maintenance mod on %(host)s to %(mode)s"), {'host': host, 'mode': mode}) <NEW_LINE> if mode: <NEW_LINE> <INDENT> host_task = self._session._call_method( self._session._get_vim(), "EnterMaintenanceMode_Task", host_mor, timeout=0, evacuatePoweredOffVms=True) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> host_task = self._session._call_method( self._session._get_vim(), "ExitMaintenanceMode_Task", host_mor, timeout=0) <NEW_LINE> <DEDENT> self._session._wait_for_task(host, host_task) <NEW_LINE> <DEDENT> def set_host_enabled(self, _host, enabled): <NEW_LINE> <INDENT> pass
Implements host related operations.
62599044d53ae8145f919772
class Fluorescence(Instruction): <NEW_LINE> <INDENT> def __init__(self, ref, wells, excitation, emission, dataref, flashes=25): <NEW_LINE> <INDENT> super(Fluorescence, self).__init__({ "op": "fluorescence", "object": ref, "wells": wells, "excitation": excitation, "emission": emission, "num_flashes": flashes, "dataref": dataref })
Read the fluorescence at the indicated wavelength for the indicated wells. Append a Fluorescence instruction to the list of instructions for this Protocol object. Parameters ---------- ref : str, Container wells : list, WellGroup WellGroup of wells to be measured or a list of well references in the form of ["A1", "B1", "C5", ...] excitation : str, Unit wavelength of light used to excite the wells indicated emission : str, Unit wavelength of light to be measured for the indicated wells dataref : str name of this specific dataset of measured absorbances flashes : int, optional
6259904426238365f5fade6e
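A sketch of constructing the instruction above; the container reference, wells, and the "value:unit" wavelength strings are illustrative placeholders:

>>> instr = Fluorescence(
...     ref="sample_plate",
...     wells=["A1", "B1", "C5"],
...     excitation="587:nanometer",
...     emission="610:nanometer",
...     dataref="fluorescence_read_1",
...     flashes=25)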
class Option(dict): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self.update(*args, **kwargs) <NEW_LINE> <DEDENT> def __setitem__(self, key, value): <NEW_LINE> <INDENT> if isinstance(value, dict): <NEW_LINE> <INDENT> value = Option(value) <NEW_LINE> <DEDENT> return super(Option, self).__setitem__(key, value) <NEW_LINE> <DEDENT> def __getitem__(self, name): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return super(Option, self).__getitem__(name) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def __delitem__(self, name): <NEW_LINE> <INDENT> return super(Option, self).__delitem__(name) <NEW_LINE> <DEDENT> __getattr__ = __getitem__ <NEW_LINE> __setattr__ = __setitem__ <NEW_LINE> def __real_update(self, key, value): <NEW_LINE> <INDENT> if self.get(key) and isinstance(self[key], Option): <NEW_LINE> <INDENT> if isinstance(value, dict): <NEW_LINE> <INDENT> value = Option(value) <NEW_LINE> <DEDENT> if isinstance(value, Option): <NEW_LINE> <INDENT> self[key].update(value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self[key] = value <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self[key] = value <NEW_LINE> <DEDENT> <DEDENT> def update(self, *args, **kwargs): <NEW_LINE> <INDENT> if args: <NEW_LINE> <INDENT> if len(args) > 1: <NEW_LINE> <INDENT> raise TypeError( 'update expected at most 1 arguments, got {}' .format(len(args))) <NEW_LINE> <DEDENT> arg = dict(args[0]) <NEW_LINE> for key, value in arg.items(): <NEW_LINE> <INDENT> self.__real_update(key, value) <NEW_LINE> <DEDENT> <DEDENT> for key, value in kwargs.items(): <NEW_LINE> <INDENT> self.__real_update(key, value)
Configurations
6259904407d97122c4217fb4
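A short sketch of the attribute-style access and recursive update the Option class above provides:

>>> opt = Option({"db": {"host": "localhost", "port": 5432}})
>>> opt.db.host
'localhost'
>>> opt.update({"db": {"port": 6432}})    # nested dicts are merged, not replaced
>>> (opt.db.host, opt.db.port)
('localhost', 6432)
>>> opt.missing is None                   # unknown keys return None instead of raising
True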
class CelerySignalProcessor(RealTimeSignalProcessor): <NEW_LINE> <INDENT> def handle_save(self, sender, instance, **kwargs): <NEW_LINE> <INDENT> app_label = instance._meta.app_label <NEW_LINE> model_name = instance._meta.model_name <NEW_LINE> transaction.on_commit(lambda: handle_save.delay(instance.pk, app_label, model_name)) <NEW_LINE> <DEDENT> def handle_pre_delete(self, sender, instance, **kwargs): <NEW_LINE> <INDENT> app_label = instance._meta.app_label <NEW_LINE> model_name = instance._meta.model_name <NEW_LINE> handle_pre_delete.delay(instance.pk, app_label, model_name) <NEW_LINE> <DEDENT> def handle_delete(self, sender, instance, **kwargs): <NEW_LINE> <INDENT> app_label = instance._meta.app_label <NEW_LINE> model_name = instance._meta.model_name <NEW_LINE> handle_delete.delay(instance.pk, app_label, model_name)
Celery signal processor. Allows automatic updates on the index as delayed background tasks using Celery. NB: We cannot process deletes as background tasks. By the time the Celery worker would pick up the delete job, the model instance would already be deleted. We can get around this by setting Celery to use `pickle` and sending the object to the worker, but using `pickle` opens the application up to security concerns.
62599044d99f1b3c44d069b3
class Client(User): <NEW_LINE> <INDENT> pass
Client Model.
6259904494891a1f408ba080
class TagInfoUnit(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.TagKey = None <NEW_LINE> self.TagValue = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.TagKey = params.get("TagKey") <NEW_LINE> self.TagValue = params.get("TagValue")
Tag information unit.
62599044b830903b9686ee04
class _NoBlockType(Base_Block): <NEW_LINE> <INDENT> def __new__(cls): <NEW_LINE> <INDENT> return NoBlock <NEW_LINE> <DEDENT> def __reduce__(self): <NEW_LINE> <INDENT> return (_NoBlockType, ()) <NEW_LINE> <DEDENT> def __copy__(self): <NEW_LINE> <INDENT> return NoBlock <NEW_LINE> <DEDENT> def __deepcopy__(self, memo): <NEW_LINE> <INDENT> return NoBlock <NEW_LINE> <DEDENT> def train(self, default): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test(self, default): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __call__(self, default): <NEW_LINE> <INDENT> pass
Default value for an input block, meaning "there is no input block defined yet". Consequently, when a block has NoBlock as an input, you cannot compute it.
62599044379a373c97d9a33e
class ExtendVerifier(Step): <NEW_LINE> <INDENT> COMMAND = "verify add-verifier-ext --source %(source)s" <NEW_LINE> DEPENDS_ON = CreateVerifier <NEW_LINE> CALL_ARGS = {"source": "https://git.openstack.org/openstack/" "keystone-tempest-plugin"}
Extend verifier with keystone integration tests.
62599044507cdc57c63a60b1
class SourceConfiguration(AWSProperty): <NEW_LINE> <INDENT> props: PropsDictType = { "AuthenticationConfiguration": (AuthenticationConfiguration, False), "AutoDeploymentsEnabled": (boolean, False), "CodeRepository": (CodeRepository, False), "ImageRepository": (ImageRepository, False), }
`SourceConfiguration <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-apprunner-service-sourceconfiguration.html>`__
62599044d10714528d69f017
class DeprecationStatus(messages.Message): <NEW_LINE> <INDENT> deleted = messages.StringField(1) <NEW_LINE> deprecated = messages.StringField(2) <NEW_LINE> obsolete = messages.StringField(3) <NEW_LINE> replacement = messages.StringField(4) <NEW_LINE> state = messages.StringField(5)
A DeprecationStatus object. Fields: deleted: A string attribute. deprecated: A string attribute. obsolete: A string attribute. replacement: A string attribute. state: A string attribute.
62599044e64d504609df9d5b
class DefaultProperties(Virtual, PropertySheet, View): <NEW_LINE> <INDENT> id = 'default' <NEW_LINE> _md = {'xmlns': 'http://www.zope.org/propsets/default'}
The default property set mimics the behavior of old-style Zope properties -- it stores its property values in the instance of its owner.
6259904407d97122c4217fb5
class SpellingTest(AbstractRuleTest): <NEW_LINE> <INDENT> ERROR_CUTOFF=0.05 <NEW_LINE> def __init__(self, case_sensitive=False): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.case_sensitive = case_sensitive <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> self.spelling_map = dict() <NEW_LINE> self.total_occurrences = dict() <NEW_LINE> <DEDENT> def end(self): <NEW_LINE> <INDENT> status = True <NEW_LINE> for tagspec, spellings in self.spelling_map.items(): <NEW_LINE> <INDENT> correction_cache = {} <NEW_LINE> if len(spellings) == 0: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> mean_frequency = self.total_occurrences[tagspec] / len(spellings) <NEW_LINE> standard_deviation = math.sqrt( sum(map(lambda n: (len(n)-mean_frequency)**2, spellings.values())) / len(spellings) ) <NEW_LINE> variance_coefficient = standard_deviation / mean_frequency <NEW_LINE> if variance_coefficient > 1.0: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> good_spellings = list() <NEW_LINE> for spelling, locations in spellings.items(): <NEW_LINE> <INDENT> if len(locations) > mean_frequency * SpellingTest.ERROR_CUTOFF: <NEW_LINE> <INDENT> good_spellings.append(spelling) <NEW_LINE> spellings[spelling] = None <NEW_LINE> <DEDENT> <DEDENT> for spelling, locations in spellings.items(): <NEW_LINE> <INDENT> if locations is None: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if spelling in correction_cache: <NEW_LINE> <INDENT> correction = correction_cache['spelling'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> correction = find_closest_match(spelling, good_spellings) <NEW_LINE> correction_cache[spelling] = correction <NEW_LINE> <DEDENT> if correction is not None: <NEW_LINE> <INDENT> status = False <NEW_LINE> for location in locations: <NEW_LINE> <INDENT> self.report_error( 'Possible spelling error', value=location[2], row=location[0], column=location[1], suggested_value=correction, scope='cell' ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return status <NEW_LINE> <DEDENT> def validate_cell(self, value, row, column): <NEW_LINE> <INDENT> tagspec = column.get_display_tag(sort_attributes=True) <NEW_LINE> if self.case_sensitive: <NEW_LINE> <INDENT> cooked_value = hxl.datatypes.normalise_space(value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> cooked_value = hxl.datatypes.normalise_string(value) <NEW_LINE> <DEDENT> self.total_occurrences[tagspec] = self.total_occurrences.setdefault(tagspec, 0) + 1 <NEW_LINE> self.spelling_map.setdefault(tagspec, {}).setdefault(cooked_value, []).append((row, column, value,)) <NEW_LINE> return True
Detect spelling outliers in a column. HXL schema: #valid_value+spelling. Will treat numbers and dates as strings, so use this only in columns where you expect text, and frequently-repeated values (e.g. #status, #org+name, #sector+name). Will skip validation if the coefficient of variation > 1.0. Collects all of the spelling variants first, then checks the rare ones in the end() method, and reports any ones that have near matches among the common ones.
62599044a4f1c619b294f812
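A toy illustration (made-up counts) of the end-of-run statistics SpellingTest describes: the coefficient-of-variation gate and the 5% rarity cutoff used to flag likely misspellings:

spellings = {"Health": 40, "Education": 35, "Helth": 1}          # spelling -> occurrence count
mean_frequency = sum(spellings.values()) / len(spellings)         # ~25.3
std = (sum((n - mean_frequency) ** 2 for n in spellings.values()) / len(spellings)) ** 0.5
variance_coefficient = std / mean_frequency                        # ~0.68 <= 1.0, so the check runs
rare = [s for s, n in spellings.items() if n <= mean_frequency * SpellingTest.ERROR_CUTOFF]
# rare == ["Helth"]; each of its occurrences would be reported with the closest common spelling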
class MetaCubic(Sprite): <NEW_LINE> <INDENT> def __init__(self, path): <NEW_LINE> <INDENT> super(MetaCubic, self).__init__() <NEW_LINE> self.image = pygame.image.load(path) <NEW_LINE> self.rect = self.image.get_rect() <NEW_LINE> <DEDENT> def update(self, game_settings, event_key): <NEW_LINE> <INDENT> if event_key == pygame.K_DOWN: <NEW_LINE> <INDENT> self.rect.y += game_settings.cubic_move_dist <NEW_LINE> <DEDENT> if event_key == pygame.K_LEFT: <NEW_LINE> <INDENT> self.rect.x -= game_settings.cubic_move_dist <NEW_LINE> <DEDENT> if event_key == pygame.K_RIGHT: <NEW_LINE> <INDENT> self.rect.x += game_settings.cubic_move_dist <NEW_LINE> <DEDENT> if event_key == 'up': <NEW_LINE> <INDENT> self.rect.y -= game_settings.cubic_move_dist <NEW_LINE> <DEDENT> if event_key == 'down': <NEW_LINE> <INDENT> self.rect.y += game_settings.cubic_move_dist
Create the meta cubic block sprite.
6259904424f1403a92686257
class RangeNormalize(object): <NEW_LINE> <INDENT> def __init__(self, min_val, max_val): <NEW_LINE> <INDENT> self.min_val = min_val <NEW_LINE> self.max_val = max_val <NEW_LINE> <DEDENT> def __call__(self, *inputs): <NEW_LINE> <INDENT> outputs = [] <NEW_LINE> for idx, _input in enumerate(inputs): <NEW_LINE> <INDENT> _min_val = _input.min() <NEW_LINE> _max_val = _input.max() <NEW_LINE> a = (self.max_val - self.min_val) / (_max_val - _min_val) <NEW_LINE> b = self.max_val- a * _max_val <NEW_LINE> _input = _input.mul(a).add(b) <NEW_LINE> outputs.append(_input) <NEW_LINE> <DEDENT> return outputs if idx > 1 else outputs[0]
Given min_val: (R, G, B) and max_val: (R,G,B), will normalize each channel of the th.*Tensor to the provided min and max values. Works by calculating: a = (max'-min')/(max-min) b = max' - a * max new_value = a * value + b where min' & max' are given values, and min & max are observed min/max for each channel Arguments --------- min_range : float or integer Min value to which tensors will be normalized max_range : float or integer Max value to which tensors will be normalized fixed_min : float or integer Give this value if every sample has the same min (max) and you know for sure what it is. For instance, if you have an image then you know the min value will be 0 and the max value will be 255. Otherwise, the min/max value will be calculated for each individual sample and this will decrease speed. Don't use this if each sample has a different min/max. fixed_max : float or integer See above Example: >>> x = th.rand(3,5,5) >>> rn = RangeNormalize((0,0,10),(1,1,11)) >>> x_norm = rn(x) Also works with just one value for min/max: >>> x = th.rand(3,5,5) >>> rn = RangeNormalize(0,1) >>> x_norm = rn(x)
625990448a43f66fc4bf34a8
class GSMetadata(BaseModule): <NEW_LINE> <INDENT> PROFILE_RE = re.compile(r'profile-(?P<dotted_name>[\w.]+):[\w\W]+') <NEW_LINE> @classmethod <NEW_LINE> def create_from_files(cls, top_dir): <NEW_LINE> <INDENT> if top_dir.endswith('.py'): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> for path, folders, filenames in os.walk(top_dir): <NEW_LINE> <INDENT> for filename in filenames: <NEW_LINE> <INDENT> if filename == 'metadata.xml': <NEW_LINE> <INDENT> yield cls( top_dir, os.path.join(path, filename), ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def scan(self): <NEW_LINE> <INDENT> tree = ElementTree.parse(self.path).getroot() <NEW_LINE> for node in tree.iter('dependency'): <NEW_LINE> <INDENT> if not node.text: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> result = self.PROFILE_RE.search(node.text.strip()) <NEW_LINE> if result: <NEW_LINE> <INDENT> yield DottedName( result.group('dotted_name'), file_path=self.path, is_test=self.testing, )
Extract imports from Generic Setup metadata.xml files. These files are in common use in Zope/Plone to define Generic Setup profile dependencies between projects.
62599044507cdc57c63a60b3
class FileService(models.Model): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> db_table = 'file_server' <NEW_LINE> <DEDENT> name = models.CharField('文件名称', max_length=64, null=True) <NEW_LINE> path = models.CharField('文件链接', max_length=128) <NEW_LINE> type = models.CharField('文件类型', max_length=8, null=True) <NEW_LINE> size = models.CharField('文件大小', max_length=8, null=True) <NEW_LINE> note = models.TextField('备注', default='') <NEW_LINE> is_active = models.BooleanField('状态', default=True) <NEW_LINE> created_time = models.DateTimeField('创建时间', auto_now_add=True) <NEW_LINE> updated_time = models.DateTimeField('更新时间', auto_now=True)
File service.
6259904415baa723494632a8
class _MetaData(dict): <NEW_LINE> <INDENT> blankrecord = None <NEW_LINE> dfd = None <NEW_LINE> fields = None <NEW_LINE> field_count = 0 <NEW_LINE> field_types = None <NEW_LINE> filename = None <NEW_LINE> ignorememos = False <NEW_LINE> memoname = None <NEW_LINE> mfd = None <NEW_LINE> memo = None <NEW_LINE> memofields = None <NEW_LINE> newmemofile = False <NEW_LINE> nulls = None <NEW_LINE> user_fields = None <NEW_LINE> user_field_count = 0
Container class for storing per table metadata
6259904426068e7796d4dc5c
class ArmAmplifier(Amplifier): <NEW_LINE> <INDENT> def __init__(self, logger=None): <NEW_LINE> <INDENT> super(ArmAmplifier, self).__init__(logger) <NEW_LINE> self.cnv = [{'sep':-0.6, 'step':-100, 'speed':10,}, {'sep':-0.2, 'step':-50, 'speed':10,}, {'sep':0.3, 'step':0, 'speed':0,}, {'sep':0.7, 'step':50, 'speed':10,},] <NEW_LINE> self.cnv_last = {'step':100, 'speed':10,}
For beam input to armservo. Input value is -1.0 to 1.0; 0.0 is center.
62599044a8ecb03325872527
class RunPort(Base): <NEW_LINE> <INDENT> __tablename__ = 'run_ports' <NEW_LINE> port_number = Column(Integer, primary_key=True) <NEW_LINE> run_id = Column(Integer, ForeignKey('runs.id', ondelete='CASCADE'), primary_key=True) <NEW_LINE> run = relationship('Run', uselist=False, back_populates='ports') <NEW_LINE> type = Column(Text, nullable=False, default='http') <NEW_LINE> map_host = Column(Text, nullable=True)
A network port to be exposed from the experiment container.
6259904473bcbd0ca4bcb5a2
class TestPrepareDocsS3(TestCase): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> cls._connection = PrepareDocsS3.PrepareDocsS3( 'fixtures/national-archives-and-records-administration/' ) <NEW_LINE> cls._connection.custom_parser = parse_foiaonline_metadata <NEW_LINE> <DEDENT> @moto.mock_s3 <NEW_LINE> def test_upload_docs_s3(self): <NEW_LINE> <INDENT> conn = boto.connect_s3() <NEW_LINE> conn.create_bucket('testbucket') <NEW_LINE> self._connection.s3_bucket = conn.get_bucket('testbucket') <NEW_LINE> fixture_path = 'fixtures/national-archives-and-records-administration' <NEW_LINE> fixtures = os.path.join(LOCAL_PATH, fixture_path) <NEW_LINE> for dirpath, dirnames, filenames in os.walk(fixtures): <NEW_LINE> <INDENT> for item in filenames: <NEW_LINE> <INDENT> k = Key(self._connection.s3_bucket) <NEW_LINE> s3_loc = os.path.join(dirpath.replace(LOCAL_PATH, ''), item) <NEW_LINE> k.key = s3_loc <NEW_LINE> k.set_contents_from_filename(os.path.join(dirpath, item)) <NEW_LINE> <DEDENT> <DEDENT> returned_file = conn.get_bucket('testbucket').get_key(s3_loc) <NEW_LINE> self.assertEqual(returned_file.name, s3_loc) <NEW_LINE> self._connection.prepare_documents() <NEW_LINE> manifest_location = os.path.join( 'fixtures', 'national-archives-and-records-administration', '20150331', 'manifest.yaml') <NEW_LINE> manifest = self._connection.s3_bucket.get_key(manifest_location) .get_contents_as_string() <NEW_LINE> manifest = yaml.load(manifest) <NEW_LINE> self.assertEqual(len(manifest), 3)
Test that PrepareDocsS3 generates the manifest entirely on S3.
6259904423e79379d538d814