code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class shutdown(ipc): <NEW_LINE> <INDENT> pass
Message used to indicate that the component receiving it should shutdown. Due to legacy mistakes, use shutdownMicroprocess instead.
6259906529b78933be26ac66
class MilestoneNote(AccessControlShortcutMixin, TimestampMixin, models.Model): <NEW_LINE> <INDENT> note = fields.RichTextField(_('note'), blank=True) <NEW_LINE> milestone = models.ForeignKey(Milestone, related_name='note', related_query_name='note', null=True) <NEW_LINE> @property <NEW_LINE> def owner_group(self): <NEW_LINE> <INDENT> return self.milestone.owner_group <NEW_LINE> <DEDENT> @property <NEW_LINE> def member_group(self): <NEW_LINE> <INDENT> return self.milestone.member_group <NEW_LINE> <DEDENT> @property <NEW_LINE> def viewer_group(self): <NEW_LINE> <INDENT> return self.milestone.viewer_group
Stores a note attached to a milestone object
6259906592d797404e3896ff
class HalfOrc(CharacterRace): <NEW_LINE> <INDENT> def __init__(self, race_name=None): <NEW_LINE> <INDENT> super().__init__(race_name if race_name else Races.HALF_ORC) <NEW_LINE> self._ability_score_increases = [ (Abilities.STRENGTH, 2), (Abilities.CONSTITUTION, 1) ] <NEW_LINE> self._age = Age(75, 14) <NEW_LINE> self._size = Sizes.MEDIUM <NEW_LINE> self._speed = 30 <NEW_LINE> self._languages = [ Languages.COMMON, Languages.ORC ] <NEW_LINE> self._senses = [ Darkvision(60) ] <NEW_LINE> self._skills = [ Skills.INTIMIDATION ]
Class docstring.
625990652ae34c7f260ac82c
class IPHandler(BaseHandler): <NEW_LINE> <INDENT> methods_allowed = ('GET',) <NEW_LINE> fields = ('botnettype', 'host', 'longitude', 'latitude', 'botnethashvalue', 'config', 'firstseen', 'lastseen') <NEW_LINE> model = Botnet <NEW_LINE> @throttle(5, 1*60) <NEW_LINE> def read(self, request, ip): <NEW_LINE> <INDENT> ip = RelatedIPs.objects.get(ip=ip) <NEW_LINE> botnet = Botnet.objects.filter(id=ip.botnet.id) <NEW_LINE> return botnet
Returns information about botnet(s) /api/ip/addr will reply with botnet(s) info for those that have detected an IP with number addr
625990658a43f66fc4bf38d4
class th_buildin_method(ThpyPlugin): <NEW_LINE> <INDENT> title = "methods" <NEW_LINE> description = "Python methods" <NEW_LINE> keyword = { u"นี่":"this", u"กรอก":"input", u"รับ":"input", u"ข้อความ":"str", u"รายการ": "list", u"ตัวเลข":"int", u"จำนวนจริง":"float", u"ความยาว":"len", u"ช่วง":"range", u"ชนิด":"type", u"ช่วย":"help", u"เอกสาร":"help", }
python th methods
625990654428ac0f6e659c76
class EventListView(TemplateView): <NEW_LINE> <INDENT> template_name = 'multilingual_events/event_list.html' <NEW_LINE> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> ctx = super(EventListView, self).get_context_data(**kwargs) <NEW_LINE> upcoming = Event.objects.get_upcoming(self.request) <NEW_LINE> archived = Event.objects.get_archived(self.request) <NEW_LINE> ctx.update({ 'upcoming_events': upcoming, 'archived_events': archived, }) <NEW_LINE> return ctx
A view that lists all upcoming events for the current language.
625990657d847024c075db1b
class APIGatewayRequestAuthorizerEvent(TypedDict): <NEW_LINE> <INDENT> type: Literal["REQUEST"] <NEW_LINE> methodArn: str <NEW_LINE> resource: str <NEW_LINE> path: str <NEW_LINE> httpMethod: str <NEW_LINE> headers: Dict[str, str] <NEW_LINE> queryStringParameters: Dict[str, str] <NEW_LINE> pathParameters: Dict[str, str] <NEW_LINE> stageVariables: Dict[str, str] <NEW_LINE> requestContext: Dict[str, Any]
APIGatewayRequestAuthorizerEvent https://docs.aws.amazon.com/apigateway/latest/developerguide/api-gateway-lambda-authorizer-input.html Attributes: ---------- type: Literal['REQUEST'] methodArn: str resource: str path: str httpMethod: str headers: Dict[str, str] queryStringParameters: Dict[str, str] pathParameters: Dict[str, str] stageVariables: Dict[str, str] requestContext: Dict[str, Any]
62599065f7d966606f74945c
class Event(object): <NEW_LINE> <INDENT> def __init__(self, name, source_states, destination_state, callbacks=None): <NEW_LINE> <INDENT> if name is None or len(name) == 0: <NEW_LINE> <INDENT> raise StateMachineEventError(STATE_MACHINE.INVALID_EVENT_IDENTIFIER) <NEW_LINE> <DEDENT> if destination_state is None or not isinstance(destination_state, State): <NEW_LINE> <INDENT> raise StateMachineEventError(STATE_MACHINE.INVALID_EVENT_DESTINATION_STATE_IDENTIFIER, substitute={'event': name}) <NEW_LINE> <DEDENT> invalid_states = [f'{state} ({type(state).__name__})' for state in source_states if not isinstance(state, State)] <NEW_LINE> if len(invalid_states) > 0: <NEW_LINE> <INDENT> raise StateMachineEventError(STATE_MACHINE.INVALID_EVENT_SOURCE_STATES_IDENTIFIER, substitute={'states': invalid_states}) <NEW_LINE> <DEDENT> self.name = name <NEW_LINE> self.source_states = set(source_states) <NEW_LINE> self.destination_state = destination_state <NEW_LINE> self._should_fire_event = callbacks.should_fire_event if callbacks else None <NEW_LINE> self._will_fire_event = callbacks.will_fire_event if callbacks else None <NEW_LINE> self._did_fire_event = callbacks.did_fire_event if callbacks else None <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return f'<{self.__class__.__name__} {hex(id(self))}> {self.name}' <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return f'<{self.__class__.__name__} {self.name}>' <NEW_LINE> <DEDENT> def should_fire(self, transition): <NEW_LINE> <INDENT> fire_event = False <NEW_LINE> if self._should_fire_event: <NEW_LINE> <INDENT> fire_event = self._should_fire_event(transition) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> fire_event = True <NEW_LINE> <DEDENT> return fire_event <NEW_LINE> <DEDENT> def will_fire(self, transition): <NEW_LINE> <INDENT> if self._will_fire_event: <NEW_LINE> <INDENT> self._will_fire_event(transition) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def did_fire(self, 
transition): <NEW_LINE> <INDENT> if self._did_fire_event: <NEW_LINE> <INDENT> self._did_fire_event(transition) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pass
The class describes an event within a state machine that causes a transition between states. Each event has a descriptive name and specifies the state that the machine will transition into after the event has been fired. Events can optionally be constrained to a set of source states that the machine must be in for the event to fire.
625990657d847024c075db1c
class AbstractReducer(metaclass=ABCMeta): <NEW_LINE> <INDENT> ForkingPickler = ForkingPickler <NEW_LINE> register = register <NEW_LINE> dump = dump <NEW_LINE> send_handle = send_handle <NEW_LINE> recv_handle = recv_handle <NEW_LINE> if sys.platform == 'win32': <NEW_LINE> <INDENT> steal_handle = steal_handle <NEW_LINE> duplicate = duplicate <NEW_LINE> DupHandle = DupHandle <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> sendfds = sendfds <NEW_LINE> recvfds = recvfds <NEW_LINE> DupFd = DupFd <NEW_LINE> <DEDENT> _reduce_method = _reduce_method <NEW_LINE> _reduce_method_descriptor = _reduce_method_descriptor <NEW_LINE> _rebuild_partial = _rebuild_partial <NEW_LINE> _reduce_socket = _reduce_socket <NEW_LINE> _rebuild_socket = _rebuild_socket <NEW_LINE> def __init__(self, *args): <NEW_LINE> <INDENT> register(type(_C().f), _reduce_method) <NEW_LINE> register(type(list.append), _reduce_method_descriptor) <NEW_LINE> register(type(int.__add__), _reduce_method_descriptor) <NEW_LINE> register(functools.partial, _reduce_partial) <NEW_LINE> register(socket.socket, _reduce_socket)
Abstract base class for use in implementing a Reduction class suitable for use in replacing the standard reduction mechanism used in multiprocessing_on_dill.
625990653eb6a72ae038bda4
class _TelnetParser: <NEW_LINE> <INDENT> def __init__(self, trace : CoreTrace): <NEW_LINE> <INDENT> self.userData = bytearray() <NEW_LINE> self.currCmd = bytearray() <NEW_LINE> self.currSb = bytearray() <NEW_LINE> self.unparsed = bytearray() <NEW_LINE> self.trace = trace <NEW_LINE> <DEDENT> def __Trace(self, text : str): <NEW_LINE> <INDENT> self.trace.Add(self, text) <NEW_LINE> <DEDENT> def __ProcessCmd(self, byte : int): <NEW_LINE> <INDENT> assert not self.currSb <NEW_LINE> self.currCmd.append(byte) <NEW_LINE> if len(self.currCmd) == self.prof.cmdLen: <NEW_LINE> <INDENT> self.currCmd = b'' <NEW_LINE> <DEDENT> <DEDENT> def __ProcessSb(self, byte : int): <NEW_LINE> <INDENT> assert not self.currCmd <NEW_LINE> self.currSb.append(byte) <NEW_LINE> if byte == TelnetConst.SE and self.currSb[-1] == TelnetConst.IAC: <NEW_LINE> <INDENT> self.currSb = b'' <NEW_LINE> <DEDENT> <DEDENT> def __ProcessUserData(self, byte : int): <NEW_LINE> <INDENT> assert not self.currCmd and not self.currSb <NEW_LINE> self.userData.append(byte) <NEW_LINE> <DEDENT> def Parse(self, byteData : bytes) -> bytes: <NEW_LINE> <INDENT> toParse = self.unparsed + byteData <NEW_LINE> self.__Trace("Parse: {0}".format(byteData)) <NEW_LINE> self.unparsed = bytearray() <NEW_LINE> i = 0 <NEW_LINE> while i < len(toParse): <NEW_LINE> <INDENT> curr = toParse[i] <NEW_LINE> i += 1 <NEW_LINE> if curr != TelnetConst.IAC: <NEW_LINE> <INDENT> if self.currCmd: self.__ProcessCmd(curr) <NEW_LINE> elif self.currSb: self.__ProcessSb(curr) <NEW_LINE> else: self.__ProcessUserData(curr) <NEW_LINE> continue <NEW_LINE> <DEDENT> if i >= len(toParse): <NEW_LINE> <INDENT> if self.currSb: self.__ProcessSb(curr) <NEW_LINE> else: self.unparsed = curr <NEW_LINE> break <NEW_LINE> <DEDENT> next = toParse[i] <NEW_LINE> i += 1 <NEW_LINE> if next == TelnetConst.SB: <NEW_LINE> <INDENT> self.__ProcessSb(curr) <NEW_LINE> self.__ProcessSb(next) <NEW_LINE> <DEDENT> elif next == TelnetConst.IAC: <NEW_LINE> <INDENT> if self.currSb: 
self.__ProcessSb(next) <NEW_LINE> else: <NEW_LINE> <INDENT> self.__ProcessUserData(curr) <NEW_LINE> self.__ProcessUserData(next) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.__ProcessCmd(curr) <NEW_LINE> self.__ProcessCmd(next) <NEW_LINE> <DEDENT> <DEDENT> res = self.userData <NEW_LINE> self.userData = bytearray() <NEW_LINE> self.__Trace("Parsed result: {0}".format(res)) <NEW_LINE> return res <NEW_LINE> <DEDENT> def EnableTrace(self, state : bool): <NEW_LINE> <INDENT> self.trace.Enable(state)
Parsing data receiving via Telnet, split it to user data and telnet commands.
62599065009cb60464d02c7d
class TestVolumeDefBindRw(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testVolumeDefBindRw(self): <NEW_LINE> <INDENT> pass
VolumeDefBindRw unit test stubs
6259906599fddb7c1ca63971
class Sequential(object): <NEW_LINE> <INDENT> def __init__(self, a): <NEW_LINE> <INDENT> self.a = a <NEW_LINE> <DEDENT> def __call__(self, time_scale=1): <NEW_LINE> <INDENT> r = 2 <NEW_LINE> t = map(lambda x: x * time_scale, [1.0, 0.5, 1.5, 2.0]) <NEW_LINE> for identity, sleep in enumerate(t, 1): <NEW_LINE> <INDENT> r = self.a(r, sleep=sleep, identity=identity) <NEW_LINE> <DEDENT> return r
Chain activity calls. 10 ---------4| 5 ---16| 15 ------------256| R 20 ---------------65536 Duration: 50 Result: 0
6259906545492302aabfdc21
class StorageArrayChangeablePropertiesTest(unittest.TestCase): <NEW_LINE> <INDENT> def test_storage_array_changeable_properties(self): <NEW_LINE> <INDENT> storage_array_changeable_properties_obj = StorageArrayChangeableProperties() <NEW_LINE> self.assertNotEqual(storage_array_changeable_properties_obj, None)
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
625990654a966d76dd5f063b
class quality_acceptation(osv.osv): <NEW_LINE> <INDENT> _inherit = 'quality.acceptation' <NEW_LINE> _columns = { 'line_ids': fields.one2many('quality.acceptation.line', 'acceptation_id', 'Lines'), }
Acceptation form
625990654f6381625f19a047
class ParamItem(ListItem): <NEW_LINE> <INDENT> _columns = ["name", "value", "on_entry", "on_exit", "on_error", "collect"] <NEW_LINE> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._item_data[0].text <NEW_LINE> <DEDENT> @property <NEW_LINE> def value(self): <NEW_LINE> <INDENT> return self._item_data[1] <NEW_LINE> <DEDENT> @property <NEW_LINE> def on_entry(self): <NEW_LINE> <INDENT> return self._item_data[2].text <NEW_LINE> <DEDENT> @property <NEW_LINE> def on_exit(self): <NEW_LINE> <INDENT> return self._item_data[3].text <NEW_LINE> <DEDENT> @property <NEW_LINE> def on_error(self): <NEW_LINE> <INDENT> return self._item_data[4].text <NEW_LINE> <DEDENT> @property <NEW_LINE> def collect(self): <NEW_LINE> <INDENT> return self._item_data[5].text
Represents a parameter in the list
6259906599cbb53fe683262a
class BodyDataStatisticsPass2: <NEW_LINE> <INDENT> def __init__(self, context): <NEW_LINE> <INDENT> self._context = context <NEW_LINE> if (self._context.mean is None) or (self._context.global_mean is None): <NEW_LINE> <INDENT> raise ValueError("Pass 1 need to run first.") <NEW_LINE> <DEDENT> self._context = context <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def add(self, item): <NEW_LINE> <INDENT> if self._context.std is None: <NEW_LINE> <INDENT> self._context.std = (item - self._context.mean)**2 <NEW_LINE> self._context.global_std = np.sum((item - self._context.global_mean)**2, axis=0) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._context.std += (item - self._context.mean)**2 <NEW_LINE> self._context.global_std += np.sum((item - self._context.global_mean)**2, axis=0) <NEW_LINE> <DEDENT> <DEDENT> def __exit__(self, exc_type, exc_value, traceback): <NEW_LINE> <INDENT> self._context.std = np.sqrt(self._context.std / self._context.count) <NEW_LINE> self._context.global_std = np.sqrt(self._context.global_std / (self._context.count * self._context.mean.shape[0]))
A helper class that compute the standard deviation and global std for skeleton data.
62599065d486a94d0ba2d70f
class CertificateMergeParameters(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'x509_certificates': {'required': True}, } <NEW_LINE> _attribute_map = { 'x509_certificates': {'key': 'x5c', 'type': '[bytearray]'}, 'certificate_attributes': {'key': 'attributes', 'type': 'CertificateAttributes'}, 'tags': {'key': 'tags', 'type': '{str}'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(CertificateMergeParameters, self).__init__(**kwargs) <NEW_LINE> self.x509_certificates = kwargs['x509_certificates'] <NEW_LINE> self.certificate_attributes = kwargs.get('certificate_attributes', None) <NEW_LINE> self.tags = kwargs.get('tags', None)
The certificate merge parameters. All required parameters must be populated in order to send to Azure. :param x509_certificates: Required. The certificate or the certificate chain to merge. :type x509_certificates: list[bytearray] :param certificate_attributes: The attributes of the certificate (optional). :type certificate_attributes: ~azure.keyvault.v7_1.models.CertificateAttributes :param tags: A set of tags. Application specific metadata in the form of key-value pairs. :type tags: dict[str, str]
62599065adb09d7d5dc0bcb0
class TestUniformMeshComponents(unittest.TestCase): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> random.seed(987324987234) <NEW_LINE> cls.o, cls.r = test_reactors.loadTestReactor( TEST_ROOT, customSettings={"xsKernel": "MC2v2"} ) <NEW_LINE> cls.r.core.lib = isotxs.readBinary(ISOAA_PATH) <NEW_LINE> a = cls.r.core[4] <NEW_LINE> a[2].setHeight(a[2].getHeight() * 1.05) <NEW_LINE> <DEDENT> def setUp(self): <NEW_LINE> <INDENT> self.converter = uniformMesh.NeutronicsUniformMeshConverter() <NEW_LINE> self.converter._sourceReactor = self.r <NEW_LINE> <DEDENT> def test_computeAverageAxialMesh(self): <NEW_LINE> <INDENT> refMesh = self.r.core.findAllAxialMeshPoints( [self.r.core.getFirstAssembly(Flags.FUEL)] )[ 1: ] <NEW_LINE> self.converter._computeAverageAxialMesh() <NEW_LINE> avgMesh = self.converter._uniformMesh <NEW_LINE> self.assertEqual(len(refMesh), len(avgMesh)) <NEW_LINE> self.assertEqual(refMesh[0], avgMesh[0]) <NEW_LINE> self.assertNotEqual(refMesh[4], avgMesh[4])
Tests individual operations of the uniform mesh converter Only loads reactor once per suite.
62599065009cb60464d02c7e
class MachineResolver(MethodsMixin, db.Model): <NEW_LINE> <INDENT> __tablename__ = 'machineresolver' <NEW_LINE> id = db.Column(db.Integer, primary_key=True, nullable=False) <NEW_LINE> name = db.Column(db.Unicode(255), default=u"", unique=True, nullable=False) <NEW_LINE> rtype = db.Column(db.Unicode(255), default=u"", nullable=False) <NEW_LINE> rconfig = db.relationship('MachineResolverConfig', lazy='dynamic', backref='machineresolver') <NEW_LINE> def __init__(self, name, rtype): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.rtype = rtype <NEW_LINE> <DEDENT> def delete(self): <NEW_LINE> <INDENT> ret = self.id <NEW_LINE> db.session.delete(self) <NEW_LINE> db.session.query(MachineResolverConfig) .filter(MachineResolverConfig.resolver_id == ret) .delete() <NEW_LINE> db.session.commit() <NEW_LINE> return ret
This model holds the definition to the machinestore. Machines could be located in flat files, LDAP directory or in puppet services or other... The usual MachineResolver just holds a name and a type and a reference to its config
6259906532920d7e50bc778c
class InventorySubRecipeService(Requests): <NEW_LINE> <INDENT> service_url = "/inventory/sub-recipes" <NEW_LINE> model_cls = InventorySubRecipeModel <NEW_LINE> def get_inventory_sub_recipes(self, **kwargs): <NEW_LINE> <INDENT> return self.get_list(**kwargs) <NEW_LINE> <DEDENT> def get_inventory_sub_recipe_by_id(self, pk): <NEW_LINE> <INDENT> return self.get_detail(pk) <NEW_LINE> <DEDENT> def get_inventory_ingredient_activities(self, sub_recipes_id): <NEW_LINE> <INDENT> url = self.get_service_url() <NEW_LINE> response = self.get(f"{url}/{sub_recipes_id}/activities") <NEW_LINE> return InventorySubRecipeModel(**response)
Inventory Ingredient service class to allow retrieving inventory SubRecipe related data
625990650c0af96317c57902
class RequestBrokerScrip(Message): <NEW_LINE> <INDENT> def __init__(self, sender, receiver, amount): <NEW_LINE> <INDENT> Message.__init__(self, sender, receiver) <NEW_LINE> self.amount = amount <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return Message.__str__(self) + str(self.amount) <NEW_LINE> <DEDENT> def as_json(self): <NEW_LINE> <INDENT> return json.dumps({'type': type(self).__name__, 'sender':str(self.sender), 'receiver': str(self.receiver), 'data': {'amount': str(self.amount)}})
A simple message from a customer to broker for buying scrip. Contains customer_ID and value of scrip he wants to buy.
625990658e71fb1e983bd20c
class TestMoveElasticsearchClusterConfiguration(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testMoveElasticsearchClusterConfiguration(self): <NEW_LINE> <INDENT> pass
MoveElasticsearchClusterConfiguration unit test stubs
6259906556ac1b37e6303888
class PasswordPanel(wx.Panel): <NEW_LINE> <INDENT> def __init__(self, parent, noteBook): <NEW_LINE> <INDENT> wx.Panel.__init__(self, noteBook) <NEW_LINE> self.parent = parent <NEW_LINE> self.frame = self.parent.parent <NEW_LINE> self.fullAccessLabel = wx.StaticText(self, -1, _(u"Full access password")) <NEW_LINE> self.fullAccessSet = wx.Button(self, -1, _(u"Set"), name="setFullAccess") <NEW_LINE> self.fullAccessSet.SetValidator(PasswordValidator(self)) <NEW_LINE> self.fullAccessUnset = wx.Button(self, -1, _(u"Unset"), name="unsetFullAccess") <NEW_LINE> self.fullAccessUnset.SetValidator(PasswordValidator(self)) <NEW_LINE> self.restrictedAccessLabel = wx.StaticText(self, -1, _(u"Restricted access password")) <NEW_LINE> self.restrictedAccessSet = wx.Button(self, -1, _(u"Set"), name="setRestrictedAccess") <NEW_LINE> self.restrictedAccessSet.SetValidator(PasswordValidator(self)) <NEW_LINE> self.restrictedAccessUnset = wx.Button(self, -1, _(u"Unset"), name="unsetRestrictedAccess") <NEW_LINE> self.restrictedAccessUnset.SetValidator(PasswordValidator(self)) <NEW_LINE> self.viewAccessLabel = wx.StaticText(self, -1, _(u"View access password")) <NEW_LINE> self.viewAccessSet = wx.Button(self, -1, _(u"Set"), name="setViewAccess") <NEW_LINE> self.viewAccessSet.SetValidator(PasswordValidator(self)) <NEW_LINE> self.viewAccessUnset = wx.Button(self, -1, _(u"Unset"), name="unsetViewAccess") <NEW_LINE> self.viewAccessUnset.SetValidator(PasswordValidator(self)) <NEW_LINE> self.__doProperties() <NEW_LINE> self.__doLayout() <NEW_LINE> Safety(self) <NEW_LINE> <DEDENT> def __doProperties(self): <NEW_LINE> <INDENT> self.SetName("passwordPanel") <NEW_LINE> <DEDENT> def __doLayout(self): <NEW_LINE> <INDENT> accessSizer = wx.FlexGridSizer(rows=3, cols=3, hgap=4, vgap=4) <NEW_LINE> accessSizer.Add(self.fullAccessLabel, 0, wx.ALIGN_CENTER_VERTICAL, 0) <NEW_LINE> accessSizer.Add(self.fullAccessSet, 0, wx.ALL, 0) <NEW_LINE> accessSizer.Add(self.fullAccessUnset, 0, wx.ALL, 0) <NEW_LINE> 
accessSizer.Add(self.restrictedAccessLabel, 0, wx.ALIGN_CENTER_VERTICAL, 0) <NEW_LINE> accessSizer.Add(self.restrictedAccessSet, 0, wx.ALL, 0) <NEW_LINE> accessSizer.Add(self.restrictedAccessUnset, 0, wx.ALL, 0) <NEW_LINE> accessSizer.Add(self.viewAccessLabel, 0, wx.ALIGN_CENTER_VERTICAL, 0) <NEW_LINE> accessSizer.Add(self.viewAccessSet, 0, wx.ALL, 0) <NEW_LINE> accessSizer.Add(self.viewAccessUnset, 0, wx.ALL, 0) <NEW_LINE> accessSizer.AddGrowableCol(2) <NEW_LINE> self.SetSizer(accessSizer) <NEW_LINE> self.Layout()
PasswordPanel builds the panel to configure the access to the application and the corresponding keychain
625990653317a56b869bf0e6
class TruckLikeSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> truck = TruckSerializer(read_only=True) <NEW_LINE> location = serializers.SerializerMethodField() <NEW_LINE> def get_location(self, obj): <NEW_LINE> <INDENT> current_location = services.get_current_truck_location(obj.truck) <NEW_LINE> return current_location <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> model = models.LikedTruck <NEW_LINE> fields = ('truck', 'user', 'location')
For TruckLikes
6259906597e22403b383c653
class AuthProviderResponseError(Error): <NEW_LINE> <INDENT> pass
Error coming from a provider
6259906516aa5153ce401c21
class _IsInert(object): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def check(self, verb): <NEW_LINE> <INDENT> def verb_decorated(*args, **kwargs): <NEW_LINE> <INDENT> inert_agent = args[0].inert <NEW_LINE> if inert_agent is True: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return verb(*args, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> return verb_decorated
Decorator method for the verbs of the current Agent. If the agent has been declared inert, all verbs are rendered inactive.
625990650a50d4780f706963
class FieldModelBase: <NEW_LINE> <INDENT> pass
Base field Model class
6259906501c39578d7f142d8
class BTSystemConfigInfo(object): <NEW_LINE> <INDENT> openapi_types = { 'value': 'str', 'name': 'str', 'id': 'str', 'href': 'str', 'view_ref': 'str' } <NEW_LINE> attribute_map = { 'value': 'value', 'name': 'name', 'id': 'id', 'href': 'href', 'view_ref': 'viewRef' } <NEW_LINE> def __init__(self, value=None, name=None, id=None, href=None, view_ref=None): <NEW_LINE> <INDENT> self._value = None <NEW_LINE> self._name = None <NEW_LINE> self._id = None <NEW_LINE> self._href = None <NEW_LINE> self._view_ref = None <NEW_LINE> self.discriminator = None <NEW_LINE> if value is not None: <NEW_LINE> <INDENT> self.value = value <NEW_LINE> <DEDENT> if name is not None: <NEW_LINE> <INDENT> self.name = name <NEW_LINE> <DEDENT> if id is not None: <NEW_LINE> <INDENT> self.id = id <NEW_LINE> <DEDENT> if href is not None: <NEW_LINE> <INDENT> self.href = href <NEW_LINE> <DEDENT> if view_ref is not None: <NEW_LINE> <INDENT> self.view_ref = view_ref <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def value(self): <NEW_LINE> <INDENT> return self._value <NEW_LINE> <DEDENT> @value.setter <NEW_LINE> def value(self, value): <NEW_LINE> <INDENT> self._value = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @name.setter <NEW_LINE> def name(self, name): <NEW_LINE> <INDENT> self._name = name <NEW_LINE> <DEDENT> @property <NEW_LINE> def id(self): <NEW_LINE> <INDENT> return self._id <NEW_LINE> <DEDENT> @id.setter <NEW_LINE> def id(self, id): <NEW_LINE> <INDENT> self._id = id <NEW_LINE> <DEDENT> @property <NEW_LINE> def href(self): <NEW_LINE> <INDENT> return self._href <NEW_LINE> <DEDENT> @href.setter <NEW_LINE> def href(self, href): <NEW_LINE> <INDENT> self._href = href <NEW_LINE> <DEDENT> @property <NEW_LINE> def view_ref(self): <NEW_LINE> <INDENT> return self._view_ref <NEW_LINE> <DEDENT> @view_ref.setter <NEW_LINE> def view_ref(self, view_ref): <NEW_LINE> <INDENT> self._view_ref = view_ref <NEW_LINE> <DEDENT> def 
to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.openapi_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, BTSystemConfigInfo): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually.
62599065ac7a0e7691f73c2c
class ValidateUtilities(unittest.TestCase): <NEW_LINE> <INDENT> def test_maps_critical(self): <NEW_LINE> <INDENT> actual = logleveler('critical') <NEW_LINE> expected = 50 <NEW_LINE> self.assertEqual(expected, actual) <NEW_LINE> <DEDENT> def test_maps_info(self): <NEW_LINE> <INDENT> actual = logleveler('info') <NEW_LINE> expected = 20 <NEW_LINE> self.assertEqual(expected, actual) <NEW_LINE> <DEDENT> def test_maps_debug(self): <NEW_LINE> <INDENT> actual = logleveler('debug') <NEW_LINE> expected = 10 <NEW_LINE> self.assertEqual(expected, actual) <NEW_LINE> <DEDENT> def test_maps_warn(self): <NEW_LINE> <INDENT> actual = logleveler('warn') <NEW_LINE> expected = 30 <NEW_LINE> self.assertEqual(expected, actual) <NEW_LINE> <DEDENT> def test_maps_warning(self): <NEW_LINE> <INDENT> actual = logleveler('warning') <NEW_LINE> expected = 30 <NEW_LINE> self.assertEqual(expected, actual) <NEW_LINE> <DEDENT> def test_maps_error(self): <NEW_LINE> <INDENT> actual = logleveler('error') <NEW_LINE> expected = 40 <NEW_LINE> self.assertEqual(expected, actual) <NEW_LINE> <DEDENT> def test_maps_notset(self): <NEW_LINE> <INDENT> actual = logleveler('notset') <NEW_LINE> expected = 0 <NEW_LINE> self.assertEqual(expected, actual)
test the utilities module
625990653539df3088ecd9e5
class BoardSchema(ma.Schema): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> fields = ('bid','subject','uid','created')
Some info
625990654a966d76dd5f063d
class ESEDBPluginTestCase(test_lib.ParserTestCase): <NEW_LINE> <INDENT> def _ParseESEDBFileWithPlugin( self, path_segments, plugin, knowledge_base_values=None): <NEW_LINE> <INDENT> storage_writer = fake_writer.FakeStorageWriter() <NEW_LINE> storage_writer.Open() <NEW_LINE> file_entry = self._GetTestFileEntry(path_segments) <NEW_LINE> parser_mediator = self._CreateParserMediator( storage_writer, file_entry=file_entry, knowledge_base_values=knowledge_base_values) <NEW_LINE> file_object = file_entry.GetFileObject() <NEW_LINE> database = esedb.ESEDatabase() <NEW_LINE> database.Open(file_object) <NEW_LINE> try: <NEW_LINE> <INDENT> required_tables_exist = plugin.CheckRequiredTables(database) <NEW_LINE> self.assertTrue(required_tables_exist) <NEW_LINE> cache = esedb.ESEDBCache() <NEW_LINE> plugin.Process(parser_mediator, cache=cache, database=database) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> database.Close() <NEW_LINE> <DEDENT> return storage_writer
ESE database-based plugin test case.
625990657c178a314d78e790
class RelatedStudentKeyValue(AbstractApplicationKeyValue, AbstractIsAdmin): <NEW_LINE> <INDENT> relatedstudent = models.ForeignKey(RelatedStudent) <NEW_LINE> student_can_read = models.BooleanField( help_text='Specifies if a student can read the value or not.', default=False) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> unique_together = ('relatedstudent', 'application', 'key') <NEW_LINE> app_label = 'core' <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def q_is_admin(cls, user_obj): <NEW_LINE> <INDENT> return Q(relatedstudent__period__admins=user_obj) | Q(relatedstudent__period__parentnode__admins=user_obj) | Q(relatedstudent__period__parentnode__parentnode__pk__in=Node._get_nodepks_where_isadmin(user_obj)) <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return '{0}: {1}'.format(self.relatedstudent, super(RelatedStudentKeyValue, self).__unicode__())
Key/value pair tied to a specific RelatedStudent.
625990654e4d562566373b4f
class EMBLTableMotor(AbstractMotor): <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> AbstractMotor.__init__(self, name) <NEW_LINE> self.direction = None <NEW_LINE> self.socket = None <NEW_LINE> self.enabled = False <NEW_LINE> atexit.register(self.close) <NEW_LINE> <DEDENT> def init(self): <NEW_LINE> <INDENT> self.direction = self.getProperty("direction") <NEW_LINE> self.set_position(0) <NEW_LINE> self.update_state(self.motor_states.READY) <NEW_LINE> self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) <NEW_LINE> self.socket.connect(("10.14.90.12", 701)) <NEW_LINE> self.socket.send("enable (0,1,2,3)\r") <NEW_LINE> self.enabled = True <NEW_LINE> <DEDENT> def is_ready(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def connected(self): <NEW_LINE> <INDENT> self.set_ready(True) <NEW_LINE> <DEDENT> def disconnected(self): <NEW_LINE> <INDENT> self.set_ready(False) <NEW_LINE> <DEDENT> def stop(self): <NEW_LINE> <INDENT> self.socket.send("disable (0,1,2,3)\r") <NEW_LINE> self.enabled = False <NEW_LINE> <DEDENT> def set_value_relative(self, relative_position, wait=False, timeout=None): <NEW_LINE> <INDENT> self.update_state(self.motor_states.MOVING) <NEW_LINE> if not self.enabled: <NEW_LINE> <INDENT> self.socket.send("enable (0,1,2,3)\r") <NEW_LINE> time.sleep(1) <NEW_LINE> <DEDENT> if self.direction == "vertical": <NEW_LINE> <INDENT> self.socket.send("PTP/r (1), %f\r" % relative_position) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.socket.send("PTP/r (3), %f\r" % relative_position) <NEW_LINE> <DEDENT> self.update_state(self.motor_states.READY) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.socket.close() <NEW_LINE> logging.getLogger("HWR").info("EMBLTableMotor: Socket closed") <NEW_LINE> <DEDENT> except BaseException: <NEW_LINE> <INDENT> logging.getLogger("HWR").error("EMBLTableMotor: Failed to close the socket")
EMBLTableMotor defines socket interface to positioning table
625990653539df3088ecd9e6
@command(server_cmds) <NEW_LINE> class server_delete(_CycladesInit, _ServerWait): <NEW_LINE> <INDENT> arguments = dict( wait=FlagArgument('Wait server to be destroyed', ('-w', '--wait')), cluster=FlagArgument( '(DANGEROUS) Delete all VMs with names starting with the cluster ' 'prefix. Do not use it if unsure. Syntax:' ' kamaki server delete --cluster CLUSTER_PREFIX', '--cluster') ) <NEW_LINE> def _server_ids(self, server_var): <NEW_LINE> <INDENT> if self['cluster']: <NEW_LINE> <INDENT> return [s['id'] for s in self.client.list_servers() if ( s['name'].startswith(server_var))] <NEW_LINE> <DEDENT> return [server_var, ] <NEW_LINE> <DEDENT> @errors.Cyclades.server_id <NEW_LINE> def _delete_server(self, server_id): <NEW_LINE> <INDENT> if self['wait']: <NEW_LINE> <INDENT> details = self.client.get_server_details(server_id) <NEW_LINE> status = details['status'] <NEW_LINE> <DEDENT> self.client.delete_server(server_id) <NEW_LINE> if self['wait']: <NEW_LINE> <INDENT> self.wait(server_id, status) <NEW_LINE> <DEDENT> <DEDENT> @errors.Generic.all <NEW_LINE> @errors.Cyclades.connection <NEW_LINE> def _run(self, server_var): <NEW_LINE> <INDENT> deleted_vms = [] <NEW_LINE> for server_id in self._server_ids(server_var): <NEW_LINE> <INDENT> self._delete_server(server_id=server_id) <NEW_LINE> deleted_vms.append(server_id) <NEW_LINE> <DEDENT> if self['cluster']: <NEW_LINE> <INDENT> dlen = len(deleted_vms) <NEW_LINE> self.error('%s virtual server %s deleted' % ( dlen, '' if dlen == 1 else 's')) <NEW_LINE> <DEDENT> <DEDENT> def main(self, server_id_or_cluster_prefix): <NEW_LINE> <INDENT> super(self.__class__, self)._run() <NEW_LINE> self._run(server_id_or_cluster_prefix)
Delete a virtual server
62599065435de62698e9d551
@python_2_unicode_compatible <NEW_LINE> class Category(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=100) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name
分类表
62599065a8ecb03325872960
class toXLSNode(NodeWithCtrlWidget): <NEW_LINE> <INDENT> nodeName = "Write XLS" <NEW_LINE> uiTemplate = [ {'name': 'Parameters', 'type': 'group', 'children': [ {'name': 'sheet_name', 'type': 'str', 'value': 'Sheet1', 'default': 'Sheet1', 'tip': '<string, default "Sheet1">\nName of sheet which will contain DataFrame'}, {'name': 'na_rep', 'type': 'str', 'value': "", 'default': "", 'tip': '<string, default "">\nMissing data representation'}, {'name': 'Additional parameters', 'type': 'text', 'value': '#Pass here manually params. For Example:\n#{"float_format": None, "columns": None, "header": True, etc...}\n{}', 'expanded': False} ]}, {'name': 'Copy to\nclipboard', 'type': 'action', 'tip': 'Copy current DataFrame to clipboard, so it can be pasted\nwith CTRL+V into Excel or text-editor'}, {'name': 'Save file', 'type': 'action', 'tip': 'Generate Excel file'}, ] <NEW_LINE> def __init__(self, name, parent=None): <NEW_LINE> <INDENT> super(toXLSNode, self).__init__(name, parent=parent, terminals={'In': {'io': 'in'}}, color=(100, 250, 100, 150)) <NEW_LINE> <DEDENT> def _createCtrlWidget(self, **kwargs): <NEW_LINE> <INDENT> return toXLSNodeCtrlWidget(**kwargs) <NEW_LINE> <DEDENT> def process(self, In): <NEW_LINE> <INDENT> df = In <NEW_LINE> if self._ctrlWidget.saveAllowed(): <NEW_LINE> <INDENT> kwargs = self.ctrlWidget().prepareInputArguments() <NEW_LINE> fileName = QtGui.QFileDialog.getSaveFileName(None, "Save As..", "export.xlsx", "Excel files (*.xls *.xlsx)")[0] <NEW_LINE> if fileName: <NEW_LINE> <INDENT> with BusyCursor(): <NEW_LINE> <INDENT> df.to_excel(fileName, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if self._ctrlWidget.toClipbord(): <NEW_LINE> <INDENT> with BusyCursor(): <NEW_LINE> <INDENT> df.to_clipboard(excel=True) <NEW_LINE> <DEDENT> <DEDENT> return
Write data to spreadsheet or copy to clipboard
6259906529b78933be26ac68
class TTLedDict(dict): <NEW_LINE> <INDENT> def __init__(self, ttl): <NEW_LINE> <INDENT> self.ttl = ttl <NEW_LINE> super(TTLedDict, self).__init__() <NEW_LINE> <DEDENT> def __setitem__(self, key, value): <NEW_LINE> <INDENT> return super(TTLedDict, self).__setitem__(key, (time.time(), value)) <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> insert_date, value = super(TTLedDict, self).__getitem__(key) <NEW_LINE> if insert_date != 0 and insert_date + self.ttl < time.time(): <NEW_LINE> <INDENT> del self[key] <NEW_LINE> raise ExpiredValue(key) <NEW_LINE> <DEDENT> return value <NEW_LINE> <DEDENT> def __contains__(self, key): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self[key] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> def set_ttl(self, key, ttl): <NEW_LINE> <INDENT> _, value = super(TTLedDict, self).__getitem__(key) <NEW_LINE> super(TTLedDict, self).__setitem__(key, (ttl, value))
A simple TTLed in memory cache. :param ttl: the time-to-leave for records, in seconds. The cache will return an ExpiredValue once the TTL is over for its records, and remove the items from its cache. A ttl of 0 means the record never expires.
6259906592d797404e389701
class Client: <NEW_LINE> <INDENT> __clientId = 0 <NEW_LINE> def __init__(self, name, cnp): <NEW_LINE> <INDENT> self.setName(name) <NEW_LINE> self.setCnp(cnp) <NEW_LINE> self.__Id = Client.__clientId <NEW_LINE> self.__borrowedBooks = list() <NEW_LINE> self.__activity = 0 <NEW_LINE> Client.__clientId += 1 <NEW_LINE> <DEDENT> def setName(self, name): <NEW_LINE> <INDENT> self.__name = name <NEW_LINE> <DEDENT> def changeBorrowedBooks(self, borrowedBooks): <NEW_LINE> <INDENT> self.__borrowedBooks = borrowedBooks <NEW_LINE> <DEDENT> def addBorrowedBook(self, borrowedBook): <NEW_LINE> <INDENT> self.__borrowedBooks.append(borrowedBook) <NEW_LINE> self.__activity += 1 <NEW_LINE> <DEDENT> def removeBorrowedBook(self, borrowedBook): <NEW_LINE> <INDENT> self.__borrowedBooks.pop(self.__borrowedBooks.index(borrowedBook)) <NEW_LINE> <DEDENT> def getBorrowedBooks(self): <NEW_LINE> <INDENT> return self.__borrowedBooks <NEW_LINE> <DEDENT> def getName(self): <NEW_LINE> <INDENT> return self.__name <NEW_LINE> <DEDENT> def setCnp(self, cnp): <NEW_LINE> <INDENT> self.__cnp = cnp <NEW_LINE> <DEDENT> def getCnp(self): <NEW_LINE> <INDENT> return self.__cnp <NEW_LINE> <DEDENT> def getId(self): <NEW_LINE> <INDENT> return self.__id <NEW_LINE> <DEDENT> def setId(self, newId): <NEW_LINE> <INDENT> Client.__clientId = newId <NEW_LINE> <DEDENT> def getActivity(self): <NEW_LINE> <INDENT> return self.__activity <NEW_LINE> <DEDENT> def setActivity(self, activity): <NEW_LINE> <INDENT> self.__activity = activity <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.getName() + ", " + self.getCnp() + ", " + str(self.getActivity()) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if isinstance(other, self.__class__): <NEW_LINE> <INDENT> return self.getName() == other.getName() and self.getCnp() == other.getCnp() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False
This class contains all the information needed to store clients Properties: name (str), cnp (str), id (int, read only), borrowedBooks (list)
6259906556b00c62f0fb4015
class Djs(models.Model): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = 'dj' <NEW_LINE> <DEDENT> name = models.CharField( max_length=45, help_text="Public name to be shown for this DJ." ) <NEW_LINE> description = models.TextField( blank=True, help_text="A description to shown on the staff page." ) <NEW_LINE> visible = models.BooleanField( default=False, help_text="Visibility on the staff page." ) <NEW_LINE> priority = models.IntegerField( default=0, help_text="The number we sort by on the staff page, " "higher means closer to the top." ) <NEW_LINE> user = models.OneToOneField(User, related_name='dj_account') <NEW_LINE> image = models.ImageField(upload_to='djs/img') <NEW_LINE> def get_absolute_url(self): <NEW_LINE> <INDENT> return reverse('radio_users.staff.views.detail', kwargs={'user': self.name.lower()}) <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return self.name
Table that contains extra information for DJs, it is not required that each user account has one, it's fully optional.
625990657cff6e4e811b718f
class ExponentialIsotropicModel(IsotropicModel): <NEW_LINE> <INDENT> def fit(self, data, mask=None): <NEW_LINE> <INDENT> to_fit = _to_fit_iso(data, self.gtab, mask=mask) <NEW_LINE> nz_idx = to_fit > 0 <NEW_LINE> to_fit[nz_idx] = np.log(to_fit[nz_idx]) <NEW_LINE> to_fit[~nz_idx] = -np.inf <NEW_LINE> p = nanmean(to_fit / self.gtab.bvals[~self.gtab.b0s_mask], -1) <NEW_LINE> params = -p <NEW_LINE> if mask is None: <NEW_LINE> <INDENT> params = np.reshape(params, data.shape[:-1]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> out_params = np.zeros(data.shape[:-1]) <NEW_LINE> out_params[mask] = params <NEW_LINE> params = out_params <NEW_LINE> <DEDENT> return ExponentialIsotropicFit(self, params)
Representing the isotropic signal as a fit to an exponential decay function with b-values
62599065a17c0f6771d5d74a
class ConversationMessage(object): <NEW_LINE> <INDENT> def __init__(self, sender, convo): <NEW_LINE> <INDENT> self.convo = convo <NEW_LINE> self.sender = sender <NEW_LINE> <DEDENT> def jsonify(self): <NEW_LINE> <INDENT> return {"sender": self.sender, "convo": self.convo}
Represents system events for user's actions inside a conversation
6259906555399d3f05627c68
class Splitter2: <NEW_LINE> <INDENT> def __init__(self, tensors): <NEW_LINE> <INDENT> self.tensors = tensors <NEW_LINE> <DEDENT> def split(self, x): <NEW_LINE> <INDENT> sizes = [int(np.prod(t.shape)) for t in self.tensors] <NEW_LINE> assert sum(sizes) == len(x) <NEW_LINE> split_indices = np.cumsum(sizes)[:-1] <NEW_LINE> return [ xt.reshape(t.shape) for t, xt in zip(self.tensors, np.split(x, split_indices)) ] <NEW_LINE> <DEDENT> def join(self, lst): <NEW_LINE> <INDENT> flattenned = [tf.reshape(x, [-1]) for x in lst] <NEW_LINE> return tf.concat( flattenned, axis=0).numpy() <NEW_LINE> <DEDENT> def current_x(self): <NEW_LINE> <INDENT> return self.join(self.tensors) <NEW_LINE> <DEDENT> def assign_tensors(self, x): <NEW_LINE> <INDENT> for var, val in zip(self.tensors, self.split(x)): <NEW_LINE> <INDENT> var.assign(val)
Class for splitting one numpy vector into many tensors and joining back. It is used if you have a tensorflow function of several tensor variables and the optimalization algorithm was designed to work with real functions accepting one numpy vector as an argument. Thi class is meant to replace `splitter.Splitter` in new tensorflow 2.
625990657d847024c075db20
class TunnelIdentity(IanaInterfaceTypeIdentity): <NEW_LINE> <INDENT> _prefix = 'ianaift' <NEW_LINE> _revision = '2014-05-08' <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> IanaInterfaceTypeIdentity.__init__(self) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _meta_info(): <NEW_LINE> <INDENT> from ydk.models.ietf._meta import _iana_if_type as meta <NEW_LINE> return meta._meta_table['TunnelIdentity']['meta_info']
Encapsulation interface.
62599065f7d966606f74945e
class ShortInputException(Exception): <NEW_LINE> <INDENT> def __init__(self, length, atleast): <NEW_LINE> <INDENT> Exception.__init__(self) <NEW_LINE> self.length = length <NEW_LINE> self.atleast = atleast
Uma classe exceção definida pelo usuário.
625990658e7ae83300eea7d7
class LighthouseAdapter(Adapter): <NEW_LINE> <INDENT> def __init__(self, name='LighthouseAdapter', log_level=logging.INFO, **kwargs): <NEW_LINE> <INDENT> Adapter.__init__(self, name, **kwargs) <NEW_LINE> self._log_level = log_level <NEW_LINE> logging.basicConfig(level=log_level, **kwargs) <NEW_LINE> self._logger = logging.getLogger('LighthouseAdapter::{}'.format(self._name)) <NEW_LINE> self._aquanaut_cmd = aquanaut_msg_pb2.AquanautCmd() <NEW_LINE> self._actuator_cmd = self._aquanaut_cmd.actuator_commands.add() <NEW_LINE> self._handle = lighthouse_py.NodeHandle('deepdive') <NEW_LINE> self._publisher = self._handle.advertise('actuator_control') <NEW_LINE> <DEDENT> def finalize(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def send_defaults(self, event): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def send_position(self, event): <NEW_LINE> <INDENT> self.reset_message() <NEW_LINE> self._actuator_cmd.name = event.name <NEW_LINE> self._actuator_cmd.position = event.value <NEW_LINE> self._actuator_cmd.mode = aquanaut_msg_pb2.POSITION <NEW_LINE> self.write() <NEW_LINE> <DEDENT> def send_velocity(self, event): <NEW_LINE> <INDENT> self.reset_message() <NEW_LINE> self._actuator_cmd.name = event.name <NEW_LINE> self._actuator_cmd.velocity = event.value <NEW_LINE> self._actuator_cmd.mode = aquanaut_msg_pb2.VELOCITY <NEW_LINE> self.write() <NEW_LINE> <DEDENT> def send_effort(self, event): <NEW_LINE> <INDENT> self.reset_message() <NEW_LINE> self._actuator_cmd.name = event.name <NEW_LINE> self._actuator_cmd.effort = event.value <NEW_LINE> self._actuator_cmd.mode = aquanaut_msg_pb2.EFFORT <NEW_LINE> self.write() <NEW_LINE> <DEDENT> def clear_faults(self, event): <NEW_LINE> <INDENT> self.reset_message() <NEW_LINE> self._actuator_cmd.name = event.name <NEW_LINE> self._actuator_cmd.mode = aquanaut_msg_pb2.CLEAR_FAULTS <NEW_LINE> self.write() <NEW_LINE> <DEDENT> def reset_message(self): <NEW_LINE> <INDENT> self._actuator_cmd.position = 0 <NEW_LINE> 
self._actuator_cmd.velocity = 0 <NEW_LINE> self._actuator_cmd.effort = 0 <NEW_LINE> <DEDENT> def write(self): <NEW_LINE> <INDENT> print(self._aquanaut_cmd, type(self._aquanaut_cmd)) <NEW_LINE> self._publisher.publish(self._aquanaut_cmd)
An Adapter for Lighthouse.
62599065cc0a2c111447c674
class BaseTinyMCEMemoWidget(BaseDashboardPluginWidget): <NEW_LINE> <INDENT> def render(self, request=None): <NEW_LINE> <INDENT> context = {'plugin': self.plugin} <NEW_LINE> return render_to_string('tinymce/render.html', context)
Base TinyMCE memo plugin widget.
6259906556ac1b37e6303889
class TenantFileSystemFinder(FileSystemFinder): <NEW_LINE> <INDENT> def __init__(self, app_names=None, *args, **kwargs): <NEW_LINE> <INDENT> self._locations = {} <NEW_LINE> self._storages = {} <NEW_LINE> <DEDENT> @property <NEW_LINE> def locations(self): <NEW_LINE> <INDENT> if self._locations.get(connection.schema_name, None) is None: <NEW_LINE> <INDENT> schema_locations = [] <NEW_LINE> for root in settings.MULTITENANT_STATICFILES_DIRS: <NEW_LINE> <INDENT> root = utils.parse_tenant_config_path(root) <NEW_LINE> if isinstance(root, (list, tuple)): <NEW_LINE> <INDENT> prefix, root = root <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> prefix = "" <NEW_LINE> <DEDENT> if (prefix, root) not in schema_locations: <NEW_LINE> <INDENT> schema_locations.append((prefix, root)) <NEW_LINE> <DEDENT> <DEDENT> self._locations[connection.schema_name] = schema_locations <NEW_LINE> <DEDENT> return self._locations[connection.schema_name] <NEW_LINE> <DEDENT> @locations.setter <NEW_LINE> def locations(self, value): <NEW_LINE> <INDENT> self._locations[connection.schema_name] = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def storages(self): <NEW_LINE> <INDENT> if self._storages.get(connection.schema_name, None) is None: <NEW_LINE> <INDENT> schema_storages = OrderedDict() <NEW_LINE> for prefix, root in self.locations: <NEW_LINE> <INDENT> filesystem_storage = TenantStaticFilesStorage(location=root) <NEW_LINE> filesystem_storage.prefix = prefix <NEW_LINE> schema_storages[root] = filesystem_storage <NEW_LINE> <DEDENT> self._storages[connection.schema_name] = schema_storages <NEW_LINE> <DEDENT> return self._storages[connection.schema_name] <NEW_LINE> <DEDENT> @storages.setter <NEW_LINE> def storages(self, value): <NEW_LINE> <INDENT> self._storages[connection.schema_name] = value <NEW_LINE> <DEDENT> def check(self, **kwargs): <NEW_LINE> <INDENT> errors = super(TenantFileSystemFinder, self).check(**kwargs) <NEW_LINE> multitenant_staticfiles_dirs = settings.MULTITENANT_STATICFILES_DIRS <NEW_LINE> 
if not isinstance(multitenant_staticfiles_dirs, (list, tuple)): <NEW_LINE> <INDENT> errors.append( Error( "Your MULTITENANT_STATICFILES_DIRS setting is not a tuple or list.", hint="Perhaps you forgot a trailing comma?", ) ) <NEW_LINE> <DEDENT> return errors
A static files finder that uses the ``MULTITENANT_STATICFILES_DIRS`` setting to locate files for different tenants. The only difference between this and the standard FileSystemFinder implementation is that we need to keep references to the storage locations of the static files, as well as maps of dir paths to an appropriate storage instance, for each tenant.
625990651f037a2d8b9e540f
class CheckFailed(Exception): <NEW_LINE> <INDENT> pass
raise this when there's at least one check does not pass
625990653eb6a72ae038bda8
class SearchForm(forms.Form): <NEW_LINE> <INDENT> query = forms.CharField(label='Query', max_length=200, required=False) <NEW_LINE> distance = forms.FloatField(label='Distance', max_value=200, required=False, initial=3) <NEW_LINE> title = forms.CharField(label='Title', max_length=200, widget=forms.TextInput(attrs={"onkeyup" : "searchOpen('title')"}), required=False) <NEW_LINE> year = forms.IntegerField(label='Year', max_value=2020, min_value=1880, required=False, error_messages = {'min_value': "Cinematography was only invented in 1880's."}) <NEW_LINE> production_company = forms.CharField(label='Production Company', max_length=200, widget=forms.TextInput(attrs={"onkeyup" : "searchOpen('production_company')"}), required=False) <NEW_LINE> distributor = forms.CharField(label='Distributor', max_length=200, widget=forms.TextInput(attrs={"onkeyup" : "searchOpen('distributor')"}), required=False) <NEW_LINE> director = forms.CharField(label='Director', max_length=200, widget=forms.TextInput(attrs={"onkeyup" : "searchOpen('director')"}), required=False) <NEW_LINE> writer = forms.CharField(label='Writer', max_length=200, widget=forms.TextInput(attrs={"onkeyup" : "searchOpen('writer')"}), required=False) <NEW_LINE> actor = forms.CharField(label='Actor', max_length=200, widget=forms.TextInput(attrs={"onkeyup" : "searchOpen('actor')"}), required=False) <NEW_LINE> def clean(self): <NEW_LINE> <INDENT> is_address_filled = self.cleaned_data.get('query', False) <NEW_LINE> if is_address_filled: <NEW_LINE> <INDENT> distance = self.cleaned_data.get('distance', None) <NEW_LINE> if distance is None: <NEW_LINE> <INDENT> self._errors['distance'] = self.error_class([ 'Distance is required when you search by address']) <NEW_LINE> <DEDENT> <DEDENT> return self.cleaned_data
Form for all search fields.
6259906599fddb7c1ca63973
class TestDocsModelBT_array_tuple(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.docs_model = docsmodel.DocsModel() <NEW_LINE> self.schema = docsmodel.get_schema_from_file( 'test_schemas/array_tuple.json') <NEW_LINE> <DEDENT> def test_entries_count(self): <NEW_LINE> <INDENT> entries = self.docs_model.parse(self.schema) <NEW_LINE> self.assertEqual(4, len(entries)) <NEW_LINE> <DEDENT> def test_root(self): <NEW_LINE> <INDENT> entries = self.docs_model.parse(self.schema) <NEW_LINE> root_entry = entries[0] <NEW_LINE> good_root = docsmodel.ParsedItem( { "title": "array_tuple", "description": "Example schema description.", "type": "array" }, 'root', True, 0) <NEW_LINE> good_root['uniqueItems'] = False <NEW_LINE> good_root['minItems'] = None <NEW_LINE> good_root['maxItems'] = None <NEW_LINE> self.assertEqual(root_entry, good_root) <NEW_LINE> <DEDENT> def test_other_entries(self): <NEW_LINE> <INDENT> entries = self.docs_model.parse(self.schema) <NEW_LINE> good_entries = list() <NEW_LINE> good_entries.append(docsmodel.ParsedItem( { 'title': '', 'description': '', 'type': 'number' }, 'array item 0', False, 1)) <NEW_LINE> good_entries[0]['maximum'] = '+inf' <NEW_LINE> good_entries[0]['minimum'] = '-inf' <NEW_LINE> good_entries[0]['exclusiveMinimum'] = False <NEW_LINE> good_entries[0]['exclusiveMaximum'] = False <NEW_LINE> good_entries.append(docsmodel.ParsedItem( { 'title': '', 'description': '', 'type': 'string' }, 'array item 1', False, 1)) <NEW_LINE> good_entries[1]['minLength'] = None <NEW_LINE> good_entries[1]['maxLength'] = None <NEW_LINE> good_entries[1]['pattern'] = None <NEW_LINE> good_entries[1]['format'] = None <NEW_LINE> good_entries.append(docsmodel.ParsedItem( { 'title': '', 'description': '', 'type': 'boolean' }, 'array item 2', False, 1)) <NEW_LINE> self.assertEqual(good_entries[0], entries[1]) <NEW_LINE> self.assertEqual(good_entries[1], entries[2]) <NEW_LINE> self.assertEqual(good_entries[2], entries[3])
Tests array tuple typing.
62599065f548e778e596ccd3
class RemoveIamPolicyBinding(base.Command): <NEW_LINE> <INDENT> detailed_help = iam_util.GetDetailedHelpForRemoveIamPolicyBinding( 'dataset', '1000') <NEW_LINE> @staticmethod <NEW_LINE> def Args(parser): <NEW_LINE> <INDENT> parser.add_argument('id', type=str, help='The ID of the dataset.') <NEW_LINE> iam_util.AddArgsForRemoveIamPolicyBinding(parser) <NEW_LINE> <DEDENT> @genomics_util.ReraiseHttpException <NEW_LINE> def Run(self, args): <NEW_LINE> <INDENT> apitools_client = self.context[lib.GENOMICS_APITOOLS_CLIENT_KEY] <NEW_LINE> messages = self.context[lib.GENOMICS_MESSAGES_MODULE_KEY] <NEW_LINE> resources = self.context[lib.GENOMICS_RESOURCES_KEY] <NEW_LINE> dataset_resource = resources.Parse(args.id, collection='genomics.datasets') <NEW_LINE> policy_request = messages.GenomicsDatasetsGetIamPolicyRequest( resource='datasets/{0}'.format(dataset_resource.Name()), getIamPolicyRequest=messages.GetIamPolicyRequest(), ) <NEW_LINE> policy = apitools_client.datasets.GetIamPolicy(policy_request) <NEW_LINE> iam_util.RemoveBindingFromIamPolicy(policy, args) <NEW_LINE> policy_request = messages.GenomicsDatasetsSetIamPolicyRequest( resource='datasets/{0}'.format(dataset_resource.Name()), setIamPolicyRequest=messages.SetIamPolicyRequest(policy=policy), ) <NEW_LINE> return apitools_client.datasets.SetIamPolicy(policy_request) <NEW_LINE> <DEDENT> def Display(self, args, result): <NEW_LINE> <INDENT> self.format(result)
Remove IAM policy binding for a dataset. This command removes a policy binding to the IAM policy of a dataset, given a dataset ID and the binding.
62599065e76e3b2f99fda149
@six.add_metaclass(abc.ABCMeta) <NEW_LINE> class ConstrainedMinimizationProblem(object): <NEW_LINE> <INDENT> @abc.abstractproperty <NEW_LINE> def objective(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @property <NEW_LINE> def num_constraints(self): <NEW_LINE> <INDENT> constraints_shape = self.constraints.get_shape() <NEW_LINE> if self.proxy_constraints is None: <NEW_LINE> <INDENT> proxy_constraints_shape = constraints_shape <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> proxy_constraints_shape = self.proxy_constraints.get_shape() <NEW_LINE> <DEDENT> if (constraints_shape is None or proxy_constraints_shape is None or any([ii is None for ii in constraints_shape.as_list()]) or any([ii is None for ii in proxy_constraints_shape.as_list()])): <NEW_LINE> <INDENT> raise ValueError( "constraints and proxy_constraints must have fully-known shapes") <NEW_LINE> <DEDENT> if constraints_shape != proxy_constraints_shape: <NEW_LINE> <INDENT> raise ValueError( "constraints and proxy_constraints must have the same shape") <NEW_LINE> <DEDENT> size = 1 <NEW_LINE> for ii in constraints_shape.as_list(): <NEW_LINE> <INDENT> size *= ii <NEW_LINE> <DEDENT> return int(size) <NEW_LINE> <DEDENT> @abc.abstractproperty <NEW_LINE> def constraints(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @property <NEW_LINE> def proxy_constraints(self): <NEW_LINE> <INDENT> return None
Abstract class representing a `ConstrainedMinimizationProblem`. A ConstrainedMinimizationProblem consists of an objective function to minimize, and a set of constraint functions that are constrained to be nonpositive. In addition to the constraint functions, there may (optionally) be proxy constraint functions: a ConstrainedOptimizer will attempt to penalize these proxy constraint functions so as to satisfy the (non-proxy) constraints. Proxy constraints could be used if the constraints functions are difficult or impossible to optimize (e.g. if they're piecewise constant), in which case the proxy constraints should be some approximation of the original constraints that is well-enough behaved to permit successful optimization.
625990652c8b7c6e89bd4f38
@keras_export('keras.metrics.CategoricalCrossentropy') <NEW_LINE> class CategoricalCrossentropy(MeanMetricWrapper): <NEW_LINE> <INDENT> def __init__(self, name='categorical_crossentropy', dtype=None, from_logits=False, label_smoothing=0): <NEW_LINE> <INDENT> super(CategoricalCrossentropy, self).__init__( categorical_crossentropy, name, dtype=dtype, from_logits=from_logits, label_smoothing=label_smoothing)
Computes the crossentropy metric between the labels and predictions. This is the crossentropy metric class to be used when there are multiple label classes (2 or more). Here we assume that labels are given as a `one_hot` representation. eg., When labels values are [2, 0, 1], `y_true` = [[0, 0, 1], [1, 0, 0], [0, 1, 0]]. Args: name: (Optional) string name of the metric instance. dtype: (Optional) data type of the metric result. from_logits: (Optional) Whether output is expected to be a logits tensor. By default, we consider that output encodes a probability distribution. label_smoothing: (Optional) Float in [0, 1]. When > 0, label values are smoothed, meaning the confidence on label values are relaxed. e.g. `label_smoothing=0.2` means that we will use a value of `0.1` for label `0` and `0.9` for label `1`" Usage: >>> # EPSILON = 1e-7, y = y_true, y` = y_pred >>> # y` = clip_ops.clip_by_value(output, EPSILON, 1. - EPSILON) >>> # y` = [[0.05, 0.95, EPSILON], [0.1, 0.8, 0.1]] >>> # xent = -sum(y * log(y'), axis = -1) >>> # = -((log 0.95), (log 0.1)) >>> # = [0.051, 2.302] >>> # Reduced xent = (0.051 + 2.302) / 2 >>> m = tf.keras.metrics.CategoricalCrossentropy() >>> _ = m.update_state([[0, 1, 0], [0, 0, 1]], ... [[0.05, 0.95, 0], [0.1, 0.8, 0.1]]) >>> m.result().numpy() 1.1769392 >>> m.reset_states() >>> _ = m.update_state([[0, 1, 0], [0, 0, 1]], ... [[0.05, 0.95, 0], [0.1, 0.8, 0.1]], ... sample_weight=tf.constant([0.3, 0.7])) >>> m.result().numpy() 1.6271976 Usage with tf.keras API: ```python model = tf.keras.Model(inputs, outputs) model.compile( 'sgd', loss='mse', metrics=[tf.keras.metrics.CategoricalCrossentropy()]) ```
62599065d6c5a102081e386f
class RichTextSep (BaseWidget): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> BaseWidget.__init__(self) <NEW_LINE> self._sep = gtk.HSeparator() <NEW_LINE> self.add(self._sep) <NEW_LINE> self._size = None <NEW_LINE> self._sep.modify_bg(gtk.STATE_NORMAL, gdk.Color(* DEFAULT_HR_COLOR)) <NEW_LINE> self._sep.modify_fg(gtk.STATE_NORMAL, gdk.Color(* DEFAULT_HR_COLOR)) <NEW_LINE> self.connect("size-request", self._on_resize) <NEW_LINE> self.connect("parent-set", self._on_parent_set) <NEW_LINE> self._resizes_id = None <NEW_LINE> <DEDENT> def _on_parent_set(self, widget, old_parent): <NEW_LINE> <INDENT> if old_parent: <NEW_LINE> <INDENT> old_parent.disconnect(self._resize_id) <NEW_LINE> <DEDENT> if self.get_parent(): <NEW_LINE> <INDENT> self._resize_id = self.get_parent().connect("size-allocate", self._on_size_change) <NEW_LINE> <DEDENT> <DEDENT> def _on_size_change(self, widget, req): <NEW_LINE> <INDENT> w, h = self.get_desired_size() <NEW_LINE> self.set_size_request(w, h) <NEW_LINE> <DEDENT> def _on_resize(self, sep, req): <NEW_LINE> <INDENT> w, h = self.get_desired_size() <NEW_LINE> req.width = w <NEW_LINE> req.height = h <NEW_LINE> <DEDENT> def get_desired_size(self): <NEW_LINE> <INDENT> HR_HORIZONTAL_MARGIN = 20 <NEW_LINE> HR_VERTICAL_MARGIN = 10 <NEW_LINE> self._size = (self.get_parent().get_allocation().width - HR_HORIZONTAL_MARGIN, HR_VERTICAL_MARGIN) <NEW_LINE> return self._size
Separator widget for a Horizontal Rule
62599065e5267d203ee6cf63
class ChangeEmail(LoginRequiredMixin, FormView): <NEW_LINE> <INDENT> template_name = 'registration/change_email_form.html' <NEW_LINE> form_class = ChangeEmailForm <NEW_LINE> def form_valid(self, form): <NEW_LINE> <INDENT> user = self.request.user <NEW_LINE> new_email = form.cleaned_data['email'] <NEW_LINE> current_site = get_current_site(self.request) <NEW_LINE> domain = current_site.domain <NEW_LINE> context = { 'protocol': 'https' if self.request.is_secure() else 'http', 'domain': domain, 'param': dumps(user.pk), 'token': dumps(new_email), 'timeout': int(getattr(settings, 'ACTIVATION_TIMEOUT_SECONDS', 60*10)) // 60, 'user': user, } <NEW_LINE> subject = render_to_string('registration/mail_template/change_email/subject.txt', context) <NEW_LINE> message = render_to_string('registration/mail_template/change_email/message.txt', context) <NEW_LINE> send_mail(subject, message, None, [new_email]) <NEW_LINE> return redirect('registration:change_email_done')
Change Email address
62599065435de62698e9d552
class GetJobResponse: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.swaggerTypes = { 'result': 'GetJobResult', 'status': 'str', 'error_message': 'str' } <NEW_LINE> self.result = None <NEW_LINE> self.status = None <NEW_LINE> self.error_message = None
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
62599065460517430c432bf9
class DownloadRawData(luigi.Task): <NEW_LINE> <INDENT> def requires(self): <NEW_LINE> <INDENT> return FetchFileTask( 'https://data.buenosaires.gob.ar/api/datasets/HJ8rdKWmJl/download', os.path.join(data_dir, 'raw', 'trajectories.zip') ) <NEW_LINE> <DEDENT> def output(self): <NEW_LINE> <INDENT> return luigi.LocalTarget(os.path.join(data_dir, 'raw', 'bicicletas-publicas')) <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> zip = zipfile.ZipFile(self.input().path, 'r') <NEW_LINE> zip.extractall(os.path.join(data_dir, 'raw')) <NEW_LINE> zip.close()
Downloads the trajectories and stations data from https://data.buenosaires.gob.ar/dataset/bicicletas-publicas
62599065796e427e5384fec0
class BilinearSeqAttn(nn.Module): <NEW_LINE> <INDENT> def __init__(self, x_size, y_size, identity=False): <NEW_LINE> <INDENT> super(BilinearSeqAttn, self).__init__() <NEW_LINE> if not identity: <NEW_LINE> <INDENT> self.linear = nn.Linear(y_size, x_size) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.linear = None <NEW_LINE> <DEDENT> <DEDENT> def forward(self, x, y, x_mask, no_softmax=False): <NEW_LINE> <INDENT> Wy = self.linear(y) if self.linear is not None else y <NEW_LINE> xWy = x.bmm(Wy.unsqueeze(2)).squeeze(2) <NEW_LINE> if no_softmax: <NEW_LINE> <INDENT> return xWy <NEW_LINE> <DEDENT> if self.training: <NEW_LINE> <INDENT> alpha = F.log_softmax(xWy) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> alpha = F.softmax(xWy) <NEW_LINE> <DEDENT> return alpha
A bilinear attention layer over a sequence X w.r.t y: * o_i = softmax(x_i'Wy) for x_i in X. Optionally don't normalize output weights.
625990653539df3088ecd9e8
class InverseSparseDiscreteBoundaryOperator(_LinearOperator): <NEW_LINE> <INDENT> class _Solver(object): <NEW_LINE> <INDENT> def __init__(self, operator): <NEW_LINE> <INDENT> from scipy.sparse import csc_matrix <NEW_LINE> if isinstance(operator, SparseDiscreteBoundaryOperator): <NEW_LINE> <INDENT> mat = operator.sparse_operator <NEW_LINE> <DEDENT> elif isinstance(operator, csc_matrix): <NEW_LINE> <INDENT> mat = operator <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("op must be either of type " + "SparseDiscreteBoundaryOperator or of type csc_matrix. Actual type: " + str(type(operator))) <NEW_LINE> <DEDENT> from scipy.sparse.linalg import splu <NEW_LINE> self._solve_fun = None <NEW_LINE> self._shape = (mat.shape[1], mat.shape[0]) <NEW_LINE> self._dtype = mat.dtype <NEW_LINE> if mat.shape[0] == mat.shape[1]: <NEW_LINE> <INDENT> solver = splu(mat) <NEW_LINE> self._solve_fun = solver.solve <NEW_LINE> <DEDENT> elif mat.shape[0] > mat.shape[1]: <NEW_LINE> <INDENT> mat_hermitian = mat.conjugate().transpose() <NEW_LINE> solver = splu((mat_hermitian*mat).tocsc()) <NEW_LINE> self._solve_fun = lambda x: solver.solve(mat_hermitian*x) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> mat_hermitian = mat.conjugate().transpose() <NEW_LINE> solver = splu((mat*mat_hermitian).tocsc()) <NEW_LINE> self._solve_fun = lambda x: mat_hermitian*solver.solve(x) <NEW_LINE> <DEDENT> <DEDENT> def solve(self, vec): <NEW_LINE> <INDENT> if self._dtype == 'float64' and _np.iscomplexobj(vec): <NEW_LINE> <INDENT> return self.solve(_np.real(vec))+1j*self.solve(_np.imag(vec)) <NEW_LINE> <DEDENT> result = self._solve_fun(vec.squeeze()) <NEW_LINE> if vec.ndim > 1: <NEW_LINE> <INDENT> return result.reshape(self.shape[0], 1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return result <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def shape(self): <NEW_LINE> <INDENT> return self._shape <NEW_LINE> <DEDENT> @property <NEW_LINE> def dtype(self): <NEW_LINE> <INDENT> return self._dtype <NEW_LINE> 
<DEDENT> <DEDENT> def __init__(self, operator): <NEW_LINE> <INDENT> self._solver = InverseSparseDiscreteBoundaryOperator._Solver(operator) <NEW_LINE> super(InverseSparseDiscreteBoundaryOperator, self).__init__( dtype=self._solver.dtype, shape=self._solver.shape) <NEW_LINE> <DEDENT> def _matvec(self, vec): <NEW_LINE> <INDENT> return self._solver.solve(vec)
Apply the (pseudo-)inverse of a sparse operator. This class uses a Sparse LU-Decomposition (in the case of a square matrix) or a sparse normal equation to provide the application of an inverse to a sparse operator. This class derives from :class:`scipy.sparse.linalg.interface.LinearOperator` and thereby implements the SciPy LinearOperator protocol. Parameters ---------- operator : bempp.api.SparseDiscreteBoundaryOperator or scipy.sparse.csc_matrix Sparse operator to be inverted.
6259906538b623060ffaa3f6
class AttachmentHandler(BaseHandler): <NEW_LINE> <INDENT> @require_permission(BaseHandler.PERMISSION_ALL) <NEW_LINE> def delete(self, task_id, attachment_id): <NEW_LINE> <INDENT> attachment = self.safe_get_item(Attachment, attachment_id) <NEW_LINE> task = self.safe_get_item(Task, task_id) <NEW_LINE> if attachment.task is not task: <NEW_LINE> <INDENT> raise tornado.web.HTTPError(404) <NEW_LINE> <DEDENT> self.sql_session.delete(attachment) <NEW_LINE> self.try_commit() <NEW_LINE> self.write("%s" % task.id)
Delete an attachment.
62599065adb09d7d5dc0bcb4
class AUPhoneNumberField(CharField): <NEW_LINE> <INDENT> default_error_messages = { 'invalid': 'Phone numbers must contain 10 digits.', } <NEW_LINE> def clean(self, value): <NEW_LINE> <INDENT> super(AUPhoneNumberField, self).clean(value) <NEW_LINE> if value in EMPTY_VALUES: <NEW_LINE> <INDENT> return '' <NEW_LINE> <DEDENT> value = re.sub('(\(|\)|\s+|-)', '', force_text(value)) <NEW_LINE> phone_match = PHONE_DIGITS_RE.search(value) <NEW_LINE> if phone_match: <NEW_LINE> <INDENT> return '%s' % phone_match.group(1) <NEW_LINE> <DEDENT> raise ValidationError(self.error_messages['invalid'])
A form field that validates input as an Australian phone number. Valid numbers have ten digits.
625990652ae34c7f260ac831
class SelectCsvGeneratorForm(forms.Form): <NEW_LINE> <INDENT> generator = forms.ModelChoiceField(queryset=CsvGenerator.objects.none()) <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> generators = kwargs.pop('generators') <NEW_LINE> super(SelectCsvGeneratorForm, self).__init__(*args, **kwargs) <NEW_LINE> self.fields['generator'].queryset = generators
Form class for selecting a csv generator
625990657cff6e4e811b7191
class LinearBanditVariableCollection(tf.Module): <NEW_LINE> <INDENT> def __init__(self, context_dim, num_models, use_eigendecomp=False, dtype=tf.float32, name=None): <NEW_LINE> <INDENT> tf.Module.__init__(self, name=name) <NEW_LINE> self.cov_matrix_list = [] <NEW_LINE> self.data_vector_list = [] <NEW_LINE> self.eig_matrix_list = [] <NEW_LINE> self.eig_vals_list = [] <NEW_LINE> self.num_samples_list = [] <NEW_LINE> for k in range(num_models): <NEW_LINE> <INDENT> self.cov_matrix_list.append( tf.compat.v2.Variable( tf.zeros([context_dim, context_dim], dtype=dtype), name='a_' + str(k))) <NEW_LINE> self.data_vector_list.append( tf.compat.v2.Variable( tf.zeros(context_dim, dtype=dtype), name='b_{}'.format(k))) <NEW_LINE> self.num_samples_list.append( tf.compat.v2.Variable( tf.zeros([], dtype=dtype), name='num_samples_{}'.format(k))) <NEW_LINE> if use_eigendecomp: <NEW_LINE> <INDENT> self.eig_matrix_list.append( tf.compat.v2.Variable( tf.eye(context_dim, dtype=dtype), name='eig_matrix{}'.format(k))) <NEW_LINE> self.eig_vals_list.append( tf.compat.v2.Variable( tf.ones([context_dim], dtype=dtype), name='eig_vals{}'.format(k))) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.eig_matrix_list.append( tf.compat.v2.Variable( tf.constant([], dtype=dtype), name='eig_matrix{}'.format(k))) <NEW_LINE> self.eig_vals_list.append( tf.compat.v2.Variable( tf.constant([], dtype=dtype), name='eig_vals{}'.format(k)))
A collection of variables used by `LinearBanditAgent`.
625990658a43f66fc4bf38da
class LabelAction(Action): <NEW_LINE> <INDENT> TYPE = "label" <NEW_LINE> def __init__(self, label): <NEW_LINE> <INDENT> self.label = label <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_json(cls, json_obj, context): <NEW_LINE> <INDENT> return cls(Label.objects.get(org=context.org, pk=json_obj["label"])) <NEW_LINE> <DEDENT> def to_json(self): <NEW_LINE> <INDENT> return {"type": self.TYPE, "label": self.label.pk} <NEW_LINE> <DEDENT> def get_description(self): <NEW_LINE> <INDENT> return "apply label '%s'" % self.label.name <NEW_LINE> <DEDENT> def apply_to(self, org, messages): <NEW_LINE> <INDENT> for msg in messages: <NEW_LINE> <INDENT> msg.label(self.label) <NEW_LINE> <DEDENT> if self.label.is_synced: <NEW_LINE> <INDENT> org.get_backend().label_messages(org, messages, self.label) <NEW_LINE> <DEDENT> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.TYPE == other.TYPE and self.label == other.label <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash(self.TYPE + str(self.label.pk))
Adds a label to the message
62599065fff4ab517ebcef66
class SubmittedFile(UrlMixin, models.Model): <NEW_LINE> <INDENT> submission = models.ForeignKey(Submission, verbose_name=_('LABEL_SUBMISSION'), on_delete=models.CASCADE, related_name="files", ) <NEW_LINE> param_name = models.CharField( verbose_name=_('LABEL_PARAM_NAME'), max_length=128, ) <NEW_LINE> file_object = models.FileField( verbose_name=('LABEL_FILE_OBJECT'), upload_to=build_upload_dir, max_length=255, ) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = _('MODEL_NAME_SUBMITTED_FILE') <NEW_LINE> verbose_name_plural = _('MODEL_NAME_SUBMITTED_FILE_PLURAL') <NEW_LINE> app_label = 'exercise' <NEW_LINE> <DEDENT> @property <NEW_LINE> def filename(self): <NEW_LINE> <INDENT> return os.path.basename(self.file_object.path) <NEW_LINE> <DEDENT> @property <NEW_LINE> def exists(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return bool(self.file_object.size) <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def get_mime(self): <NEW_LINE> <INDENT> return guess_type(self.file_object.path)[0] <NEW_LINE> <DEDENT> def is_passed(self): <NEW_LINE> <INDENT> return is_binary(self.file_object.path) <NEW_LINE> <DEDENT> ABSOLUTE_URL_NAME = "submission-file" <NEW_LINE> def get_url_kwargs(self): <NEW_LINE> <INDENT> return dict( file_id=self.id, file_name=self.filename, **self.submission.get_url_kwargs())
Represents a file submitted by the student as a solution to an exercise. Submitted files are always linked to a certain submission through a foreign key relation. The files are stored on the disk while models are stored in the database.
62599065aad79263cf42ff0a
class ICheckinEvent( IObjectEvent ): <NEW_LINE> <INDENT> baseline = Attribute("The Working Copy's baseline") <NEW_LINE> relation = Attribute("The Working Copy Archetypes Relation Object") <NEW_LINE> checkin_message = Attribute("checkin message")
a working copy is being checked in, event.object is the working copy, this message is sent before any mutation/merge has been done on the objects
625990658e7ae83300eea7d8
class Category(models.Model): <NEW_LINE> <INDENT> category = models.CharField(max_length=100) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.category
Book Categories
625990658e71fb1e983bd210
class UploadFile(View): <NEW_LINE> <INDENT> def post(self, request): <NEW_LINE> <INDENT> upload_file = request.FILES.get('img') <NEW_LINE> file_name_last = upload_file.name.split('.').pop() <NEW_LINE> file_name = 'IMG_{}.{}'.format(datetime.now().strftime('%y%m%d%H%M%S'), file_name_last) + '.' <NEW_LINE> with open(settings.MEDIA_ROOT + '/img/' + file_name, 'wb') as file: <NEW_LINE> <INDENT> for chunk in upload_file.chunks(): <NEW_LINE> <INDENT> file.write(chunk) <NEW_LINE> <DEDENT> <DEDENT> return render(request, 'day02.html') <NEW_LINE> <DEDENT> def upload_file(self, request): <NEW_LINE> <INDENT> if request.method == 'POST': <NEW_LINE> <INDENT> pass
前端表单 必须是POST请求 类型必须是 ENCTYPE=multipart/form-data input type = 'file' 如果发现 request.files 是为空 检查是否是post请求
625990651f037a2d8b9e5410
class UserAndDiscussionViewSet(mixins.ListModelMixin, viewsets.GenericViewSet): <NEW_LINE> <INDENT> queryset = UserAndDiscussion.objects.all() <NEW_LINE> serializer_class = UserAndDiscussionSerializer <NEW_LINE> filter_backends = [DjangoFilterBackend] <NEW_LINE> filter_fields = ['agree', 'time_Of_Activity'] <NEW_LINE> permission_classes = (UserOCRAW,) <NEW_LINE> @list_route(methods=['post'], url_path='do-undo') <NEW_LINE> def do_undo(self, request): <NEW_LINE> <INDENT> if 'username' in request.GET: <NEW_LINE> <INDENT> user = authenticate(username=request.GET['username'], password=request.GET['password']) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> user = request.user <NEW_LINE> <DEDENT> if request.POST.get('agree', False) and request.POST.get('discussion', False): <NEW_LINE> <INDENT> newJoin, created = UserAndDiscussion.objects.get_or_create( user=user, discussion_id=int(request.POST['discussion']), agree=bool(request.POST['agree']) ) <NEW_LINE> if created: <NEW_LINE> <INDENT> return Response({"Details": "activity done"}) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> newJoin.delete() <NEW_LINE> return Response({"Details": "activity undone"}) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return Response({"Details": "Bad Request :("})
API endpoint that allows users to be viewed or edited.
625990653eb6a72ae038bdaa
class PythonAPM(object): <NEW_LINE> <INDENT> def __init__(self, app, surfacers=Surfacers(LogSurfacer(),)): <NEW_LINE> <INDENT> self.app = app <NEW_LINE> self.surfacers = surfacers <NEW_LINE> self.request_time = Histogram( 'pythonapm.http.request.time_microseconds', surfacers=self.surfacers, ) <NEW_LINE> self.rss_diff = Gauge( 'pythonapm.http.request.rss.diff.bytes', surfacers=self.surfacers, ) <NEW_LINE> self.request_data = { 'request_start_time': None, 'request_start_rss': None, } <NEW_LINE> self.init_apm(app) <NEW_LINE> <DEDENT> def init_apm(self, app): <NEW_LINE> <INDENT> self.register_signals(app) <NEW_LINE> app.after_request(self.decorate_response) <NEW_LINE> <DEDENT> def register_signals(self, app): <NEW_LINE> <INDENT> signals.got_request_exception.connect( self.handle_exception, sender=app, weak=False) <NEW_LINE> signals.request_started.connect(self.request_started, sender=app) <NEW_LINE> signals.request_finished.connect(self.request_finished, sender=app) <NEW_LINE> <DEDENT> def decorate_response(self, response): <NEW_LINE> <INDENT> response.headers['dm03514/pythonapm'] = uuid.uuid4() <NEW_LINE> return response <NEW_LINE> <DEDENT> def handle_exception(self, *args, **kwargs): <NEW_LINE> <INDENT> self.surfacers.flush() <NEW_LINE> <DEDENT> def request_started(self, *args, **kwargs): <NEW_LINE> <INDENT> logger.debug('request_started') <NEW_LINE> self.surfacers.clear() <NEW_LINE> self.request_data['request_start_time'] = datetime.utcnow() <NEW_LINE> self.request_data['request_start_rss'] = psutil.Process(os.getpid()).memory_info().rss <NEW_LINE> <DEDENT> def request_finished(self, *args, **kwargs): <NEW_LINE> <INDENT> logger.debug('request_finished') <NEW_LINE> self.observe_request_time() <NEW_LINE> self.set_request_rss_diff() <NEW_LINE> self.surfacers.flush() <NEW_LINE> <DEDENT> def observe_request_time(self): <NEW_LINE> <INDENT> diff = datetime.utcnow() - self.request_data['request_start_time'] <NEW_LINE> self.request_time.observe(diff.microseconds) <NEW_LINE> 
<DEDENT> def set_request_rss_diff(self): <NEW_LINE> <INDENT> diff = psutil.Process(os.getpid()).memory_info().rss - self.request_data['request_start_rss'] <NEW_LINE> self.rss_diff.set(diff)
Instruments flask applications, exposes a number of configurable metrics.
62599065a219f33f346c7f52
@utils.registerPlexObject <NEW_LINE> class Album(Audio): <NEW_LINE> <INDENT> TAG = 'Directory' <NEW_LINE> TYPE = 'album' <NEW_LINE> def __iter__(self): <NEW_LINE> <INDENT> for track in self.tracks(): <NEW_LINE> <INDENT> yield track <NEW_LINE> <DEDENT> <DEDENT> def _loadData(self, data): <NEW_LINE> <INDENT> Audio._loadData(self, data) <NEW_LINE> self.art = data.attrib.get('art') <NEW_LINE> self.key = self.key.replace('/children', '') <NEW_LINE> self.originallyAvailableAt = utils.toDatetime(data.attrib.get('originallyAvailableAt'), '%Y-%m-%d') <NEW_LINE> self.parentKey = data.attrib.get('parentKey') <NEW_LINE> self.parentRatingKey = data.attrib.get('parentRatingKey') <NEW_LINE> self.parentThumb = data.attrib.get('parentThumb') <NEW_LINE> self.parentTitle = data.attrib.get('parentTitle') <NEW_LINE> self.studio = data.attrib.get('studio') <NEW_LINE> self.year = utils.cast(int, data.attrib.get('year')) <NEW_LINE> self.genres = self.findItems(data, media.Genre) <NEW_LINE> self.collections = self.findItems(data, media.Collection) <NEW_LINE> self.labels = self.findItems(data, media.Label) <NEW_LINE> <DEDENT> def track(self, title): <NEW_LINE> <INDENT> key = '%s/children' % self.key <NEW_LINE> return self.fetchItem(key, title__iexact=title) <NEW_LINE> <DEDENT> def tracks(self, **kwargs): <NEW_LINE> <INDENT> key = '%s/children' % self.key <NEW_LINE> return self.fetchItems(key, **kwargs) <NEW_LINE> <DEDENT> def get(self, title): <NEW_LINE> <INDENT> return self.track(title) <NEW_LINE> <DEDENT> def artist(self): <NEW_LINE> <INDENT> return self.fetchItem(self.parentKey) <NEW_LINE> <DEDENT> def download(self, savepath=None, keep_original_name=False, **kwargs): <NEW_LINE> <INDENT> filepaths = [] <NEW_LINE> for track in self.tracks(): <NEW_LINE> <INDENT> filepaths += track.download(savepath, keep_original_name, **kwargs) <NEW_LINE> <DEDENT> return filepaths <NEW_LINE> <DEDENT> def _defaultSyncTitle(self): <NEW_LINE> <INDENT> return '%s - %s' % (self.parentTitle, self.title)
Represents a single audio album. Attributes: TAG (str): 'Directory' TYPE (str): 'album' art (str): Album artwork (/library/metadata/<ratingkey>/art/<artid>) genres (list): List of :class:`~plexapi.media.Genre` objects this album respresents. key (str): API URL (/library/metadata/<ratingkey>). originallyAvailableAt (datetime): Datetime this album was released. parentKey (str): API URL of this artist. parentRatingKey (int): Unique key identifying artist. parentThumb (str): URL to artist thumbnail image. parentTitle (str): Name of the artist for this album. studio (str): Studio that released this album. year (int): Year this album was released.
625990658da39b475be04935
class ImportarCategoriaForm(FlaskForm): <NEW_LINE> <INDENT> submit = SubmitField('Importar')
Formulario para importar as categorias dos produtos categorias do site
62599065462c4b4f79dbd151
class InvertedConstantBias(PhotozNoiseAddition): <NEW_LINE> <INDENT> def __init__(self,bias): <NEW_LINE> <INDENT> self.bias = bias <NEW_LINE> self.can_be_neg = False <NEW_LINE> if bias>0: <NEW_LINE> <INDENT> self.can_be_neg = True <NEW_LINE> <DEDENT> <DEDENT> def __call__(self,zspec,map_id,bin_num): <NEW_LINE> <INDENT> return (zspec-self.bias)/(1. + self.bias)
Assumes that noise photoz noise is modelled as bph(zs) = bias * (1+zs) This function assumes that we are given the photometric redshift (with only a constant bias) and we want to get the spectroscopic redshift back. zp = bph + zs -> zp = bias*(1+zs) + zs -> zp = bias+(1+bias)*zs zs*(1+bias) = zp - bias zs = (zp-bias)/(1+bias)
62599065d268445f2663a702
class ApplicationContext(object): <NEW_LINE> <INDENT> def __init__(self, solvers): <NEW_LINE> <INDENT> self.dijsktra_dist = None <NEW_LINE> self.dijsktra_preds = None <NEW_LINE> self.running = False <NEW_LINE> self.paused = True <NEW_LINE> self.num_ants = 1000 <NEW_LINE> self.show_grid = False <NEW_LINE> self.show_grid_lines = False <NEW_LINE> self.graph = None <NEW_LINE> self.colony = None <NEW_LINE> self.nest_node = None <NEW_LINE> self.food_node = None <NEW_LINE> self.solvers = solvers <NEW_LINE> self.edge_lines = {} <NEW_LINE> self.ant_sprites = None <NEW_LINE> self.wp_sprites = None <NEW_LINE> self.anim_panel_x = None <NEW_LINE> self.anim_panel_y = None <NEW_LINE> self.offset_screen_width = None <NEW_LINE> self.offset_screen_height = None <NEW_LINE> self.anim_panel_width = None <NEW_LINE> self.anim_panel_height = None <NEW_LINE> self.gui_panel_x = None <NEW_LINE> self.gui_panel_y = None <NEW_LINE> self.gui_panel_width = None <NEW_LINE> self.gui_panel_height = None <NEW_LINE> self.screen_width = None <NEW_LINE> self.screen_height = None <NEW_LINE> self.border_offset = None <NEW_LINE> self.show_only_shortest = False <NEW_LINE> self.done = False <NEW_LINE> self.paint = False <NEW_LINE> self.paint_erase = False <NEW_LINE> self.state = 'stop' <NEW_LINE> self.ant_color_dialog = None <NEW_LINE> self.runner = None <NEW_LINE> self.algorithm = None
Holds the whole state necessary for running the simulation
625990651b99ca40022900db
class BatchQuery(object): <NEW_LINE> <INDENT> def __init__(self, sync_cls): <NEW_LINE> <INDENT> self._sync_cls = sync_cls <NEW_LINE> self._qs_collection = defaultdict(list) <NEW_LINE> <DEDENT> @property <NEW_LINE> def qs_collection(self): <NEW_LINE> <INDENT> return self._qs_collection <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> self._qs_collection.clear() <NEW_LINE> return self <NEW_LINE> <DEDENT> def __exit__(self, t, value, traceback): <NEW_LINE> <INDENT> self.run() <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> for pk, qss in six.iteritems(self._qs_collection): <NEW_LINE> <INDENT> qs = reduce(operator.or_, qss) <NEW_LINE> qs_path = qs.get_path() <NEW_LINE> pk_path = pk.get_path() <NEW_LINE> logger.info('{}.filter({}).update({})'.format(self._sync_cls, pk_path, qs_path)) <NEW_LINE> with measure_time(): <NEW_LINE> <INDENT> updated_number = self._sync_cls._meta.document.objects.filter(**pk_path).update(**qs_path) <NEW_LINE> <DEDENT> if updated_number == 0 and self.is_instance_of_parent(pk.instance): <NEW_LINE> <INDENT> logger.warning('%s with path %s is not in mongo. Saving to %s.' % ( pk.instance.__class__, pk_path, self._sync_cls)) <NEW_LINE> self._sync_cls.create_document(pk.instance, with_embedded=True).save() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def is_instance_of_parent(self, instance): <NEW_LINE> <INDENT> model = self._sync_cls._meta.model <NEW_LINE> return model is not None and isinstance(instance, model) <NEW_LINE> <DEDENT> def __setitem__(self, key, qs): <NEW_LINE> <INDENT> pk = self._get_pk(key) <NEW_LINE> self._qs_collection[pk].append(qs) <NEW_LINE> <DEDENT> def _get_pk(self, k): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> ins, sfield = k <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> ins, sfield = k, None <NEW_LINE> <DEDENT> return QSPk(sync_cls=self._sync_cls, instance=ins, sfield=sfield)
BatchQuery является контекстным менеджером и используется для накопления запросов и их слияния, если это возможно. Т.е. BatchQuery пытается сделать как можно меньше запросов к базе. Запросы будут слиты, если аргументы к функции filter() у них одинаковы.
625990651f5feb6acb164337
class ShareReadWriteVolumeGatewayApiTest(CommonReadWriteVolumeGatewayApiTest): <NEW_LINE> <INDENT> def setup_volume(self): <NEW_LINE> <INDENT> sharer = make_storage_user( username='sharer', max_storage_bytes=2000) <NEW_LINE> self.owner = sharer <NEW_LINE> root = StorageObject.objects.get_root(sharer._user) <NEW_LINE> rw_node = root.make_subdirectory('WriteMe') <NEW_LINE> rw_share = self.factory.make_share( subtree=rw_node, shared_to=self.user._user, name='WriteShare', access=Share.MODIFY) <NEW_LINE> rw_share.accept() <NEW_LINE> share_dao = SharedDirectory(rw_share, by_user=sharer) <NEW_LINE> self.vgw = ReadWriteVolumeGateway(self.user, share=share_dao) <NEW_LINE> self.root = StorageObject.objects.get(id=self.vgw.get_root().id) <NEW_LINE> self.file = self.root.make_file( 'TheName', content_blob=self.factory.make_content_blob(), mimetype='fakemime')
Test the ReadWriteVolumeGateway API against for a UDF volume. For each type of volume, override setup_volume. This class tests the user's root volume.
62599065cb5e8a47e493cd2a
class Object(Node): <NEW_LINE> <INDENT> def __init__(self, name: str, parent: ParentType = None): <NEW_LINE> <INDENT> self._name = name <NEW_LINE> self._parent = parent <NEW_LINE> self._children = {} <NEW_LINE> return <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def is_leaf() -> bool: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def get_name(self) -> str: <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> def set_parent(self, parent: ParentType): <NEW_LINE> <INDENT> self._parent = parent <NEW_LINE> return <NEW_LINE> <DEDENT> def get_parent(self) -> ParentType: <NEW_LINE> <INDENT> return self._parent <NEW_LINE> <DEDENT> def add_child(self, node: NodeType) -> NodeType: <NEW_LINE> <INDENT> name = node.get_name() <NEW_LINE> if name in self._children: <NEW_LINE> <INDENT> raise KeyError("Child name already exists: %s" % name) <NEW_LINE> <DEDENT> self._children[name] = node <NEW_LINE> return node <NEW_LINE> <DEDENT> def remove_child(self, name: str) -> NodeType: <NEW_LINE> <INDENT> node = self._children.get(name) <NEW_LINE> if not node: <NEW_LINE> <INDENT> raise KeyError("No such child: %s" % name) <NEW_LINE> <DEDENT> del self._children[name] <NEW_LINE> return node <NEW_LINE> <DEDENT> def has_child(self, name: str) -> bool: <NEW_LINE> <INDENT> return name in self._children <NEW_LINE> <DEDENT> def get_child(self, name: str) -> NodeType: <NEW_LINE> <INDENT> return self._children.get(name) <NEW_LINE> <DEDENT> def items(self): <NEW_LINE> <INDENT> return self._children.items() <NEW_LINE> <DEDENT> def keys(self): <NEW_LINE> <INDENT> return self._children.keys() <NEW_LINE> <DEDENT> def values(self): <NEW_LINE> <INDENT> return self._children.values() <NEW_LINE> <DEDENT> def delete(self): <NEW_LINE> <INDENT> pass
Base class for configuration object nodes.
62599065796e427e5384fec2
class GaussFilter(control.Control): <NEW_LINE> <INDENT> def apply_control(self): <NEW_LINE> <INDENT> self.image_out = cv2.GaussianBlur(self.base_image, (25, 25), 0) <NEW_LINE> return self.image_out
GaussFilter a simple Gaussian blur filter control
62599065d7e4931a7ef3d72b
@attr.s <NEW_LINE> class Statusmap(Panel): <NEW_LINE> <INDENT> alert = attr.ib(default=None) <NEW_LINE> cards = attr.ib( default={ 'cardRound': None, 'cardMinWidth': 5, 'cardHSpacing': 2, 'cardVSpacing': 2, }, validator=instance_of(dict)) <NEW_LINE> color = attr.ib( default=attr.Factory(StatusmapColor), validator=instance_of(StatusmapColor), ) <NEW_LINE> isNew = attr.ib(default=True, validator=instance_of(bool)) <NEW_LINE> legend = attr.ib( default=attr.Factory(Legend), validator=instance_of(Legend), ) <NEW_LINE> nullPointMode = attr.ib(default=NULL_AS_ZERO) <NEW_LINE> tooltip = attr.ib( default=attr.Factory(Tooltip), validator=instance_of(Tooltip), ) <NEW_LINE> xAxis = attr.ib( default=attr.Factory(XAxis), validator=instance_of(XAxis) ) <NEW_LINE> yAxis = attr.ib( default=attr.Factory(YAxis), validator=instance_of(YAxis) ) <NEW_LINE> def to_json_data(self): <NEW_LINE> <INDENT> graphObject = { 'color': self.color, 'isNew': self.isNew, 'legend': self.legend, 'minSpan': self.minSpan, 'nullPointMode': self.nullPointMode, 'tooltip': self.tooltip, 'type': STATUSMAP_TYPE, 'xaxis': self.xAxis, 'yaxis': self.yAxis, } <NEW_LINE> if self.alert: <NEW_LINE> <INDENT> graphObject['alert'] = self.alert <NEW_LINE> <DEDENT> return self.panel_json(graphObject)
Generates json structure for the flant-statusmap-panel visualisation plugin (https://grafana.com/grafana/plugins/flant-statusmap-panel/). :param alert: Alert :param cards: A statusmap card object: keys 'cardRound', 'cardMinWidth', 'cardHSpacing', 'cardVSpacing' :param color: A StatusmapColor object :param isNew: isNew :param legend: Legend object :param nullPointMode: null :param tooltip: Tooltip object :param xAxis: XAxis object :param yAxis: YAxis object
62599065a8370b77170f1b1a
class TestCategory(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.category = Category() <NEW_LINE> <DEDENT> def test_add_category(self): <NEW_LINE> <INDENT> self.assertRaises( RuntimeError, self.category.add_category, "Categoryname") <NEW_LINE> <DEDENT> def test_delete_category(self): <NEW_LINE> <INDENT> self.assertRaises(IndexError, self.category.delete_category, 0) <NEW_LINE> <DEDENT> def test_edit_category(self): <NEW_LINE> <INDENT> self.assertRaises( IndexError, self.category.edit_category, "Categoryname", 0)
This class tests the category class
625990653617ad0b5ee0789c
class _ground_state(_state): <NEW_LINE> <INDENT> def __init__(self, ts = '', te = 0.0, prop = {}, contrib = {}): <NEW_LINE> <INDENT> _state.__init__(self, ts, te, prop) <NEW_LINE> if (len(contrib) == 0): <NEW_LINE> <INDENT> self.dict_of_econtrib = { 'hf': te } <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.dict_of_econtrib = contrib <NEW_LINE> <DEDENT> <DEDENT> def info(self): <NEW_LINE> <INDENT> _state.info(self) <NEW_LINE> print(" Energy composition:") <NEW_LINE> for name,value in list(self.dict_of_econtrib.items()): <NEW_LINE> <INDENT> print((" E(" + name + ") = " + repr(value)))
Ground state computed using ADC (derived from _state) Additional attributes: - dict_of_econtrib - Dictionary of energy contributions (in a.u.)
625990658a43f66fc4bf38dc
class Notebook: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.notes = [] <NEW_LINE> <DEDENT> def new_note(self, memo, tags=''): <NEW_LINE> <INDENT> self.notes.append(Note(memo, tags)) <NEW_LINE> <DEDENT> def _find_note(self, note_id): <NEW_LINE> <INDENT> for note in self.notes: <NEW_LINE> <INDENT> if note.id == int(note_id): <NEW_LINE> <INDENT> return note <NEW_LINE> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> def modify_memo(self, note_id, memo): <NEW_LINE> <INDENT> self._find_note(note_id).memo = memo <NEW_LINE> <DEDENT> def modify_tags(self, note_id, tags): <NEW_LINE> <INDENT> self._find_note(note_id).tags = tags <NEW_LINE> <DEDENT> def search(self, filter): <NEW_LINE> <INDENT> return [note for note in self.notes if note.match(filter)]
Represent a collection of notes that can be tagged, modified, and searched.
625990655fdd1c0f98e5f6d0
class CreateWithInlinesView(SingleObjectTemplateResponseMixin, BaseCreateWithInlinesView): <NEW_LINE> <INDENT> template_name_suffix = '_form'
View for creating a new object instance with related model instances, with a response rendered by template.
62599065379a373c97d9a76a
class ToNumpy: <NEW_LINE> <INDENT> def __call__(self, data): <NEW_LINE> <INDENT> data['audio'] = np.array(data['audio']) <NEW_LINE> return data
Transform to make numpy array
62599065f548e778e596ccd6
class TestLogicNotConvert(OPConvertAutoScanTest): <NEW_LINE> <INDENT> def sample_convert_config(self, draw): <NEW_LINE> <INDENT> input1_shape = draw( st.lists( st.integers( min_value=10, max_value=20), min_size=2, max_size=4)) <NEW_LINE> dtype = "bool" <NEW_LINE> config = { "op_names": ["logical_not"], "test_data_shapes": [input1_shape], "test_data_types": [[dtype]], "opset_version": [7, 9, 15], "input_spec_shape": [] } <NEW_LINE> model = NetNot(config) <NEW_LINE> return (config, model) <NEW_LINE> <DEDENT> def test(self): <NEW_LINE> <INDENT> self.run_and_statis(max_examples=30, max_duration=-1)
api: logical_not ops OPset version: 7, 9, 15
6259906591f36d47f2231a35
class ECHO(object): <NEW_LINE> <INDENT> def __init__(self, pin, temp=20, IO_mode=IO.BCM): <NEW_LINE> <INDENT> self.pin = pin <NEW_LINE> self.temp = temp <NEW_LINE> self.IO_mode = IO_mode <NEW_LINE> self.t_start = 0 <NEW_LINE> self.TOF = 0 <NEW_LINE> self.dist = 0 <NEW_LINE> self.speed_of_sound = 331.3 * math.sqrt(1+(self.temp / 273.15)) <NEW_LINE> self.timeout = 6 / self.speed_of_sound <NEW_LINE> self.TOF_1m = 1 / self.speed_of_sound <NEW_LINE> self.arr = [0]*10 <NEW_LINE> self.i = 0 <NEW_LINE> IO.setwarnings(False) <NEW_LINE> IO.setmode(self.IO_mode) <NEW_LINE> IO.setup(self.pin, IO.IN, pull_up_down=IO.PUD_UP) <NEW_LINE> IO.add_event_detect(self.pin, IO.BOTH, callback=self.callback) <NEW_LINE> <DEDENT> def callback(self, channel): <NEW_LINE> <INDENT> if(self.i >=10): <NEW_LINE> <INDENT> self.i=0 <NEW_LINE> <DEDENT> if (IO.input(self.pin) == IO.HIGH): <NEW_LINE> <INDENT> self.t_start = time.time() <NEW_LINE> <DEDENT> elif (IO.input(self.pin) == IO.LOW): <NEW_LINE> <INDENT> tof = time.time() - self.t_start <NEW_LINE> if((self.TOF == 0 and tof<self.timeout) or tof < self.TOF + self.TOF_1m): <NEW_LINE> <INDENT> self.TOF = tof <NEW_LINE> self.arr[self.i]=self.TOF * ((self.speed_of_sound)/(2)) <NEW_LINE> self.i+=1 <NEW_LINE> self.dist = sum(self.arr)/len(self.arr) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def GetMeters(self): <NEW_LINE> <INDENT> return round(self.dist, 5) <NEW_LINE> <DEDENT> def GetFeet(self): <NEW_LINE> <INDENT> return round(self.dist * 3.28084, 5) <NEW_LINE> <DEDENT> def GetInch(self): <NEW_LINE> <INDENT> return round(self.dist * 3.28084 * 12, 5) <NEW_LINE> <DEDENT> def clean(self): <NEW_LINE> <INDENT> IO.cleanup((self.pin))
ECHO class will catch hardware changes ie the echo. Parameters: (2) pin (int) : help= give pin number (4) temp (sec) : default = 20, help= temperature in celsius (5) IO_mode : default = IO.BCM, help= BCM or BOARD
6259906555399d3f05627c6d
class Entity: <NEW_LINE> <INDENT> def __init__( self, x: int, y: int, char: str, color: Tuple[int, int, int], renderer, font_size=20, tile_size=20, ): <NEW_LINE> <INDENT> self.x = x <NEW_LINE> self.y = y <NEW_LINE> self.char = char <NEW_LINE> self.color = sdl2.ext.Color(*color) <NEW_LINE> self.font_size = font_size <NEW_LINE> self.bg_color = sdl2.ext.Color(0, 0, 0, 255) <NEW_LINE> self.tile_size = tile_size <NEW_LINE> self.renderer = renderer <NEW_LINE> font_manager = sdl2.ext.FontManager( font_path="C:\\Windows\\Fonts\\arial.ttf", size=font_size, color=self.color, bg_color=self.bg_color, ) <NEW_LINE> factory = sdl2.ext.SpriteFactory(renderer=self.renderer) <NEW_LINE> self.text = factory.from_text(self.char, fontmanager=font_manager) <NEW_LINE> self.x_offset = -self.text.size[0] // 2 <NEW_LINE> self.y_offset = -self.text.size[1] // 2 <NEW_LINE> self.text_width = self.text.size[0] <NEW_LINE> self.text_height = self.text.size[1] <NEW_LINE> self.pixel_x = self.x * self.tile_size + self.x_offset <NEW_LINE> self.pixel_y = self.y * self.tile_size + self.y_offset <NEW_LINE> <DEDENT> def move(self, dx: int, dy: int) -> None: <NEW_LINE> <INDENT> self.x += dx <NEW_LINE> self.y += dy <NEW_LINE> self.pixel_x = self.x * self.tile_size + self.x_offset <NEW_LINE> self.pixel_y = self.y * self.tile_size + self.y_offset
A generic object to represent players, enemies, items, etc.
625990658e7ae83300eea7da
class SignalingNaNException(object): <NEW_LINE> <INDENT> def __init__(self, snan): <NEW_LINE> <INDENT> self.snan = snan <NEW_LINE> <DEDENT> def default_handler(self, attributes): <NEW_LINE> <INDENT> attributes.flag_set.add(invalid) <NEW_LINE> return self.snan <NEW_LINE> <DEDENT> def signal(self, attributes): <NEW_LINE> <INDENT> return attributes.invalid_operation_handler(self, attributes)
InvalidOperation exception signaled as a result of an arithmetic operation encountering a signaling NaN.
6259906563d6d428bbee3e2f
class Senz(hmm.HMM): <NEW_LINE> <INDENT> def __init__(self, model = model.SenzModel()): <NEW_LINE> <INDENT> self.model = model <NEW_LINE> self.hidden_state = model.mDefaultHiddenStateSet <NEW_LINE> self.visible_output_obj = model.mDefaultVisibleOutputSet <NEW_LINE> hmm.HMM.__init__(self, self.visible_output_obj, self.hidden_state, model.mDefaultPi, model.mDefaultTransitionMatrix, model.mDefaultEmissionMatrix) <NEW_LINE> <DEDENT> def initTrainSample(self, output): <NEW_LINE> <INDENT> output_obj = [] <NEW_LINE> for o in output: <NEW_LINE> <INDENT> output_obj.append(self.outputDictToObj(o)) <NEW_LINE> <DEDENT> hmm.HMM.initTrainSample(self, output_obj) <NEW_LINE> <DEDENT> def outputDictToObj(self, output_dict): <NEW_LINE> <INDENT> for b in self.visible_output_obj: <NEW_LINE> <INDENT> if output_dict == b.getEvidences(): <NEW_LINE> <INDENT> return b <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def matrixToDict(self, matrix, row, col): <NEW_LINE> <INDENT> dict = {} <NEW_LINE> i = 0 <NEW_LINE> if row == 0: <NEW_LINE> <INDENT> for c in col: <NEW_LINE> <INDENT> dict[c] = matrix[i] <NEW_LINE> i += 1 <NEW_LINE> <DEDENT> return dict <NEW_LINE> <DEDENT> for r in row: <NEW_LINE> <INDENT> j = 0 <NEW_LINE> dict[r] = {} <NEW_LINE> for c in col: <NEW_LINE> <INDENT> dict[r][c] = matrix[i][j] <NEW_LINE> j += 1 <NEW_LINE> <DEDENT> i += 1 <NEW_LINE> <DEDENT> return dict
SENZ
625990653eb6a72ae038bdac
class GraphKeys(object): <NEW_LINE> <INDENT> VARIABLES = "variables" <NEW_LINE> TRAINABLE_VARIABLES = "trainable_variables" <NEW_LINE> LOCAL_VARIABLES = "local_variables" <NEW_LINE> MODEL_VARIABLES = "model_variables" <NEW_LINE> SUMMARIES = "summaries" <NEW_LINE> QUEUE_RUNNERS = "queue_runners" <NEW_LINE> TABLE_INITIALIZERS = "table_initializer" <NEW_LINE> ASSET_FILEPATHS = "asset_filepaths" <NEW_LINE> MOVING_AVERAGE_VARIABLES = "moving_average_variables" <NEW_LINE> REGULARIZATION_LOSSES = "regularization_losses" <NEW_LINE> CONCATENATED_VARIABLES = "concatenated_variables" <NEW_LINE> SAVERS = "savers" <NEW_LINE> WEIGHTS = "weights" <NEW_LINE> BIASES = "biases" <NEW_LINE> ACTIVATIONS = "activations" <NEW_LINE> UPDATE_OPS = "update_ops" <NEW_LINE> LOSSES = "losses" <NEW_LINE> INIT_OP = "init_op" <NEW_LINE> LOCAL_INIT_OP = "local_init_op" <NEW_LINE> READY_OP = "ready_op" <NEW_LINE> SUMMARY_OP = "summary_op" <NEW_LINE> GLOBAL_STEP = "global_step"
Standard names to use for graph collections. The standard library uses various well-known names to collect and retrieve values associated with a graph. For example, the `tf.Optimizer` subclasses default to optimizing the variables collected under `tf.GraphKeys.TRAINABLE_VARIABLES` if none is specified, but it is also possible to pass an explicit list of variables. The following standard keys are defined: * `VARIABLES`: the `Variable` objects that comprise a model, and must be saved and restored together. See [`tf.all_variables()`](../../api_docs/python/state_ops.md#all_variables) for more details. * `TRAINABLE_VARIABLES`: the subset of `Variable` objects that will be trained by an optimizer. See [`tf.trainable_variables()`](../../api_docs/python/state_ops.md#trainable_variables) for more details. * `SUMMARIES`: the summary `Tensor` objects that have been created in the graph. See [`tf.merge_all_summaries()`](../../api_docs/python/train.md#merge_all_summaries) for more details. * `QUEUE_RUNNERS`: the `QueueRunner` objects that are used to produce input for a computation. See [`tf.start_queue_runners()`](../../api_docs/python/train.md#start_queue_runners) for more details. * `MOVING_AVERAGE_VARIABLES`: the subset of `Variable` objects that will also keep moving averages. See [`tf.moving_average_variables()`](../../api_docs/python/state_ops.md#moving_average_variables) for more details. * `REGULARIZATION_LOSSES`: regularization losses collected during graph construction. * `WEIGHTS`: weights inside neural network layers * `BIASES`: biases inside neural network layers * `ACTIVATIONS`: activations of neural network layers
625990650a50d4780f706966
class CoursesViewSet(ModelViewSet): <NEW_LINE> <INDENT> queryset = Courses.objects.all() <NEW_LINE> serializer_class = CourseSerializer
Вывод списка курсов
625990658da39b475be04936
class TP_Tileable_Add_Vertices_Offset(bpy.types.Operator): <NEW_LINE> <INDENT> bl_idname = "tp_ops.add_vertices_offset" <NEW_LINE> bl_label = "Offset Vertices" <NEW_LINE> bl_options = {'REGISTER', 'UNDO'} <NEW_LINE> bpy.types.Scene.tp_verts_offset = bpy.props.EnumProperty( items=[("tp_verts_x" ,"X Axis" ,"X Axis"), ("tp_verts_y" ,"Y Axis" ,"Y Axis"), ("tp_verts_xy" ,"XY Axis" ,"XY Axis")], name = "Offset", default = "tp_verts_xy", description = "Add single vertices to correct array offset") <NEW_LINE> def execute(self, context): <NEW_LINE> <INDENT> print(self) <NEW_LINE> self.report({'INFO'}, "add single vertices to correct array offset") <NEW_LINE> scene = bpy.context.scene <NEW_LINE> if scene.tp_verts_offset == "tp_verts_x": <NEW_LINE> <INDENT> verts_x = [(-17.34, -17.34, 0), (-5.78, -17.34, 0)] <NEW_LINE> mesh_data = bpy.data.meshes.new("vertices") <NEW_LINE> mesh_data.from_pydata(verts_x, [], []) <NEW_LINE> mesh_data.update() <NEW_LINE> <DEDENT> if scene.tp_verts_offset == "tp_verts_y": <NEW_LINE> <INDENT> verts_y = [(-17.34, -17.34, 0), (-17.34, -5.78, 0)] <NEW_LINE> mesh_data = bpy.data.meshes.new("vertices") <NEW_LINE> mesh_data.from_pydata(verts_y, [], []) <NEW_LINE> mesh_data.update() <NEW_LINE> <DEDENT> if scene.tp_verts_offset == "tp_verts_xy": <NEW_LINE> <INDENT> verts_xy = [(-17.34, -5.78, 0), (-17.34, -17.34, 0), (-17.34, 5.78, 0)] <NEW_LINE> mesh_data = bpy.data.meshes.new("vertices") <NEW_LINE> mesh_data.from_pydata(verts_xy, [], []) <NEW_LINE> mesh_data.update() <NEW_LINE> <DEDENT> obj = bpy.data.objects.new("offset", mesh_data) <NEW_LINE> scene.objects.link(obj) <NEW_LINE> obj.select = True <NEW_LINE> if bpy.context.mode == "EDIT_MESH": <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> bpy.ops.object.join() <NEW_LINE> <DEDENT> return {'FINISHED'}
Add single vertices to correct array offset
625990653cc13d1c6d466e90
class TweetModel(Base, BaseModelMixin): <NEW_LINE> <INDENT> __tablename__ = 'tweets' <NEW_LINE> uid = Column(Integer, primary_key=True) <NEW_LINE> update_date = Column(DateTime) <NEW_LINE> title_text = Column(String) <NEW_LINE> main_text = Column(String) <NEW_LINE> stream_id = Column(String) <NEW_LINE> video_url = Column(String) <NEW_LINE> hashtags = Column(ObjectArray)
The category SQL Alchemy model.
62599065ac7a0e7691f73c32
class NSNitroNserrWiSiteInvalStaurl(NSNitroSslvpnAaaErrors): <NEW_LINE> <INDENT> pass
Nitro error code 2665 Invalid staURL
62599065e76e3b2f99fda14d
class CSArgParser(argparse.ArgumentParser): <NEW_LINE> <INDENT> def error(self, message, exit=False): <NEW_LINE> <INDENT> sys.stderr.write('Error: {}\n'.format(message)) <NEW_LINE> self.print_help() <NEW_LINE> if exit: <NEW_LINE> <INDENT> sys.exit(2)
Argument parser that shows help if there is an error
625990651b99ca40022900dc
class Bocca(object): <NEW_LINE> <INDENT> def __init__(self, bocca=None): <NEW_LINE> <INDENT> self._bocca = bocca or which('bocca') <NEW_LINE> if self._bocca is None: <NEW_LINE> <INDENT> raise RuntimeError('unable to find bocca') <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def bocca(self): <NEW_LINE> <INDENT> return self._bocca <NEW_LINE> <DEDENT> def create_project(self, name, language=None, ifexists='raise'): <NEW_LINE> <INDENT> if ifexists not in ['pass', 'raise', 'clobber']: <NEW_LINE> <INDENT> raise ValueError('ifexists value not understood') <NEW_LINE> <DEDENT> options = [] <NEW_LINE> if language is not None: <NEW_LINE> <INDENT> options += ['--language=%s' % language] <NEW_LINE> <DEDENT> if is_bocca_project(name): <NEW_LINE> <INDENT> if ifexists == 'raise': <NEW_LINE> <INDENT> raise ProjectExistsError('project exists') <NEW_LINE> <DEDENT> elif ifexists == 'clobber': <NEW_LINE> <INDENT> shutil.rmtree(name) <NEW_LINE> <DEDENT> <DEDENT> system([self.bocca, 'create', 'project', name] + options) <NEW_LINE> <DEDENT> def create_interface(self, name, sidl=None): <NEW_LINE> <INDENT> if sidl is None or sidl is True: <NEW_LINE> <INDENT> sidl = os.path.join(_PATH_TO_SIDL, name + '.sidl') <NEW_LINE> <DEDENT> options = [] <NEW_LINE> if isinstance(sidl, types.StringTypes): <NEW_LINE> <INDENT> options += ['--import-sidl=%s@%s' % (name, sidl)] <NEW_LINE> <DEDENT> system([self.bocca, 'create', 'interface', name] + options) <NEW_LINE> <DEDENT> def create_class(self, name, implements=None, language=None, sidl=None, impl=None): <NEW_LINE> <INDENT> options = [] <NEW_LINE> if sidl is not None: <NEW_LINE> <INDENT> options += ['--import-sidl=%s@%s' % (name, sidl)] <NEW_LINE> <DEDENT> if impl is not None: <NEW_LINE> <INDENT> options += ['--import-impl=%s@%s' % (name, impl)] <NEW_LINE> <DEDENT> if language is not None: <NEW_LINE> <INDENT> options += ['--language=%s' % language] <NEW_LINE> <DEDENT> if implements is not None: <NEW_LINE> <INDENT> options += ['--implements=%s' % 
implements] <NEW_LINE> <DEDENT> system([self.bocca, 'create', 'class', name] + options) <NEW_LINE> if impl: <NEW_LINE> <INDENT> for fname in ['make.vars.user', 'make.rules.user']: <NEW_LINE> <INDENT> shutil.copy(os.path.join(impl, fname), os.path.join('components', name)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def create_bmi_class(self, name, language='c', bmi_mapping=None, impl=None): <NEW_LINE> <INDENT> bmi_mapping = bmi_mapping or {} <NEW_LINE> if not name.startswith('csdms.'): <NEW_LINE> <INDENT> name = 'csdms.' + name <NEW_LINE> <DEDENT> class_name = name.split('.')[-1] <NEW_LINE> name = '.'.join(name.split('.')[:-1] + [normalize_class_name(class_name)]) <NEW_LINE> kwds = dict(implements='csdms.core.Bmi', language=language) <NEW_LINE> if impl is None: <NEW_LINE> <INDENT> self.create_class(name, **kwds) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> with mktemp(prefix='csdms', suffix='.d') as _: <NEW_LINE> <INDENT> kwds['impl'] = make_impl_dir(name, language, subs=bmi_mapping) <NEW_LINE> self.create_class(name, **kwds)
Build babel projects with bocca.
625990651f5feb6acb164339
class FakeLock(object): <NEW_LINE> <INDENT> def __init__(self, path, identifier=None): <NEW_LINE> <INDENT> self.path = path <NEW_LINE> self.identifier = identifier
A fake Lock for testing.
62599065435de62698e9d556
class FocalMechanism(__FocalMechanism): <NEW_LINE> <INDENT> pass
This class describes the focal mechanism of an event. It includes different descriptions like nodal planes, principal axes, and a moment tensor. The moment tensor description is provided by objects of the class MomentTensor which can be specified as child elements of FocalMechanism. :type resource_id: :class:`~obspy.core.event.ResourceIdentifier` :param resource_id: Resource identifier of FocalMechanism. :type force_resource_id: bool, optional :param force_resource_id: If set to False, the automatic initialization of `resource_id` attribute in case it is not specified will be skipped. :type triggering_origin_id: :class:`~obspy.core.event.ResourceIdentifier`, optional :param triggering_origin_id: Refers to the resource_id of the triggering origin. :type nodal_planes: :class:`~obspy.core.event.NodalPlanes`, optional :param nodal_planes: Nodal planes of the focal mechanism. :type principal_axes: :class:`~obspy.core.event.PrincipalAxes`, optional :param principal_axes: Principal axes of the focal mechanism. :type azimuthal_gap: float, optional :param azimuthal_gap: Largest azimuthal gap in distribution of stations used for determination of focal mechanism. Unit: deg :type station_polarity_count: int, optional :param station_polarity_count: :type misfit: float, optional :param misfit: Fraction of misfit polarities in a first-motion focal mechanism determination. Decimal fraction between 0 and 1. :type station_distribution_ratio: float, optional :param station_distribution_ratio: Station distribution ratio (STDR) parameter. Indicates how the stations are distributed about the focal sphere (Reasenberg and Oppenheimer 1985). Decimal fraction between 0 and 1. :type method_id: :class:`~obspy.core.event.ResourceIdentifier`, optional :param method_id: Resource identifier of the method used for determination of the focal mechanism. 
:type waveform_id: list of :class:`~obspy.core.event.WaveformStreamID`, optional :param waveform_id: Refers to a set of waveform streams from which the focal mechanism was derived. :type evaluation_mode: str, optional :param evaluation_mode: Evaluation mode of FocalMechanism. Allowed values are the following: * ``"manual"`` * ``"automatic"`` :type evaluation_status: str, optional :param evaluation_status: Evaluation status of FocalMechanism. Allowed values are the following: * ``"preliminary"`` * ``"confirmed"`` * ``"reviewed"`` * ``"final"`` * ``"rejected"`` * ``"reported"`` :type moment_tensor: :class:`~obspy.core.event.MomentTensor`, optional :param moment_tensor: Moment tensor description for this focal mechanism. :type comments: list of :class:`~obspy.core.event.Comment`, optional :param comments: Additional comments. :type creation_info: :class:`~obspy.core.event.CreationInfo`, optional :param creation_info: Creation information used to describe author, version, and creation time.
62599065460517430c432bfb
class Popen(Process): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self.__subproc = subprocess32.Popen(*args, **kwargs) <NEW_LINE> self._init(self.__subproc.pid, _ignore_nsp=True) <NEW_LINE> <DEDENT> def __dir__(self): <NEW_LINE> <INDENT> return sorted(set(dir(Popen) + dir(subprocess32.Popen))) <NEW_LINE> <DEDENT> def __getattribute__(self, name): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return object.__getattribute__(self, name) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return object.__getattribute__(self.__subproc, name) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> raise AttributeError("%s instance has no attribute '%s'" % (self.__class__.__name__, name)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def wait(self, timeout=None): <NEW_LINE> <INDENT> if self.__subproc.returncode is not None: <NEW_LINE> <INDENT> return self.__subproc.returncode <NEW_LINE> <DEDENT> ret = super(Popen, self).wait(timeout) <NEW_LINE> self.__subproc.returncode = ret <NEW_LINE> return ret
A more convenient interface to stdlib subprocess32 module. It starts a sub process and deals with it exactly as when using subprocess32.Popen class but in addition also provides all the properties and methods of psutil.Process class as a unified interface: >>> import psutil >>> from ambari_commons.subprocess32 import PIPE >>> p = psutil.Popen(["python", "-c", "print 'hi'"], stdout=PIPE) >>> p.name() 'python' >>> p.uids() user(real=1000, effective=1000, saved=1000) >>> p.username() 'giampaolo' >>> p.communicate() ('hi ', None) >>> p.terminate() >>> p.wait(timeout=2) 0 >>> For method names common to both classes such as kill(), terminate() and wait(), psutil.Process implementation takes precedence. Unlike subprocess32.Popen this class pre-emptively checks wheter PID has been reused on send_signal(), terminate() and kill() so that you don't accidentally terminate another process, fixing http://bugs.python.org/issue6973. For a complete documentation refer to: http://docs.python.org/library/subprocess32.html
62599065a8370b77170f1b1c
class Jezail(Rifle, FullyImplemented): <NEW_LINE> <INDENT> Name: str = "Jezail Musket" <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super().__init__(caliber=Caliber.BB, action=FiringAction.SingleShot, capacity=1, range_falloff=.3, base_damage=20, name=self.Name, weightlb=12)
Based on the Jezail Musket https://en.wikipedia.org/wiki/Jezail
62599065d486a94d0ba2d717