Dataset schema (column, dtype, value range):

column                    dtype           min   max
body_hash                 stringlengths   64    64
body                      stringlengths   23    109k
docstring                 stringlengths   1     57k
path                      stringlengths   4     198
name                      stringlengths   1     115
repository_name           stringlengths   7     111
repository_stars          float64         0     191k
lang                      stringclasses   1 value
body_without_docstring    stringlengths   14    108k
unified                   stringlengths   45    133k
4281572e9d8fc9d488e7ce0e2ba2e71e00cba1e9e7a1ed68a517384589092b87
def branch_protection_push_allowances(rule: BranchProtectionRuleNode) -> List[PushAllowance]: 'Return the list of push allowances for the given branch protection.' return rule['pushAllowances']
Return the list of push allowances for the given branch protection.
src/ghaudit/schema.py
branch_protection_push_allowances
DistantThunder/ghaudit
1
python
def branch_protection_push_allowances(rule: BranchProtectionRuleNode) -> List[PushAllowance]: return rule['pushAllowances']
def branch_protection_push_allowances(rule: BranchProtectionRuleNode) -> List[PushAllowance]: return rule['pushAllowances']<|docstring|>Return the list of push allowances for the given branch protection.<|endoftext|>
8870e4f4ba0eacfa95593478b186cc8dd89837ef9329a9c66738daefeb23ccc1
def push_allowance_actor(allowance: PushAllowance) -> Actor: 'Return the actor that can dismiss the branch protection rule.' return allowance['actor']
Return the actor that can dismiss the branch protection rule.
src/ghaudit/schema.py
push_allowance_actor
DistantThunder/ghaudit
1
python
def push_allowance_actor(allowance: PushAllowance) -> Actor: return allowance['actor']
def push_allowance_actor(allowance: PushAllowance) -> Actor: return allowance['actor']<|docstring|>Return the actor that can dismiss the branch protection rule.<|endoftext|>
57fab8ccd2851a21a9f485b863d8e2b6e0da3a79b96233afd8f16ae1679fa1a0
def actor_type(actor: Actor) -> ActorType: 'Return the type of a given actor.' return actor['__typename']
Return the type of a given actor.
src/ghaudit/schema.py
actor_type
DistantThunder/ghaudit
1
python
def actor_type(actor: Actor) -> ActorType: return actor['__typename']
def actor_type(actor: Actor) -> ActorType: return actor['__typename']<|docstring|>Return the type of a given actor.<|endoftext|>
bfe17e9c7c3bcafb0338e4eee534ee3d5951d609cf6e3fc2155d5edaef1f9b26
def actor_get_user(rstate: Rstate, actor: Actor) -> User: 'Return a user from a given actor.' return user_by_id(rstate, actor['id'])
Return a user from a given actor.
src/ghaudit/schema.py
actor_get_user
DistantThunder/ghaudit
1
python
def actor_get_user(rstate: Rstate, actor: Actor) -> User: return user_by_id(rstate, actor['id'])
def actor_get_user(rstate: Rstate, actor: Actor) -> User: return user_by_id(rstate, actor['id'])<|docstring|>Return a user from a given actor.<|endoftext|>
3f5175e2f0feeed655e51b0a45511bce3529f093f34d68c297486ad213ccae39
def actor_get_team(rstate: Rstate, actor: Actor) -> Team: 'Return a team from a given actor.' return org_team_by_id(rstate, actor['id'])
Return a team from a given actor.
src/ghaudit/schema.py
actor_get_team
DistantThunder/ghaudit
1
python
def actor_get_team(rstate: Rstate, actor: Actor) -> Team: return org_team_by_id(rstate, actor['id'])
def actor_get_team(rstate: Rstate, actor: Actor) -> Team: return org_team_by_id(rstate, actor['id'])<|docstring|>Return a team from a given actor.<|endoftext|>
790bfc2b69bccd4f8d66c79b3979028e88e33c315ba1961f35a0c2d62c61dcdf
def actor_get_app(rstate, actor) -> None: 'Return an App from a given actor.' raise NotImplementedError()
Return an App from a given actor.
src/ghaudit/schema.py
actor_get_app
DistantThunder/ghaudit
1
python
def actor_get_app(rstate, actor) -> None: raise NotImplementedError()
def actor_get_app(rstate, actor) -> None: raise NotImplementedError()<|docstring|>Return an App from a given actor.<|endoftext|>
22128b196e5e4bef67a410b18de3907df599692b67aa6ad219de5247594a040c
def all_bp_rules(rstate: Rstate) -> Set[BranchProtectionRuleID]: 'Return the list of ID of all the branch protection rules.\n\n Return the list of ID of all the branch protection rules from all\n repositories.\n ' result = set() for repo in org_repositories(rstate): bprules = _repo_branch_protection_rules_noexcept(repo) if bprules: for bprule in bprules: result.add(branch_protection_id(bprule)) return result
Return the list of ID of all the branch protection rules. Return the list of ID of all the branch protection rules from all repositories.
src/ghaudit/schema.py
all_bp_rules
DistantThunder/ghaudit
1
python
def all_bp_rules(rstate: Rstate) -> Set[BranchProtectionRuleID]: 'Return the list of ID of all the branch protection rules.\n\n Return the list of ID of all the branch protection rules from all\n repositories.\n ' result = set() for repo in org_repositories(rstate): bprules = _repo_branch_protection_rules_noexcept(repo) if bprules: for bprule in bprules: result.add(branch_protection_id(bprule)) return result
def all_bp_rules(rstate: Rstate) -> Set[BranchProtectionRuleID]: 'Return the list of ID of all the branch protection rules.\n\n Return the list of ID of all the branch protection rules from all\n repositories.\n ' result = set() for repo in org_repositories(rstate): bprules = _repo_branch_protection_rules_noexcept(repo) if bprules: for bprule in bprules: result.add(branch_protection_id(bprule)) return result<|docstring|>Return the list of ID of all the branch protection rules. Return the list of ID of all the branch protection rules from all repositories.<|endoftext|>
9039538312546726132d4a8e3aa37f62a0ca233d84a6455f4eadb93f2002fab0
def empty() -> Rstate: 'Initialise the remote state.' return {'data': {'users': {}, 'organization': {'repositories': {'edges': []}, 'membersWithRole': [], 'teams': {'edges': []}}}}
Initialise the remote state.
src/ghaudit/schema.py
empty
DistantThunder/ghaudit
1
python
def empty() -> Rstate: return {'data': {'users': {}, 'organization': {'repositories': {'edges': []}, 'membersWithRole': [], 'teams': {'edges': []}}}}
def empty() -> Rstate: return {'data': {'users': {}, 'organization': {'repositories': {'edges': []}, 'membersWithRole': [], 'teams': {'edges': []}}}}<|docstring|>Initialise the remote state.<|endoftext|>
fb1dc68af628de81a343a3f7cb0c587137d772137fcd4afd62269d53843255fd
def validate(rstate: Rstate) -> bool: 'Validate the consistency of the remote state data structure.\n\n Either return true if no error is found, or raise a RuntimeError. The\n following checks are performed:\n\n * all repositories referenced by teams should be known\n * all users referenced by teams should be known\n * all users referenced by repositories should be known\n ' for repo in org_repositories(rstate): for missing_login in missing_collaborators(rstate, repo): msg = 'unknown users "{}" referenced as collaborators of "{}"' raise RuntimeError(msg.format(missing_login, repo_name(repo))) for team in org_teams(rstate): for missing_id in _missing_repositories(rstate, team): msg = 'unknown repositories referenced by ID "{}" in team "{}"' raise RuntimeError(msg.format(missing_id, team_name(team))) for missing_id in _missing_members(rstate, team): msg = 'unknown repositories referenced by ID "{}" in team "{}"' raise RuntimeError(msg.format(missing_id, team_name(team))) return True
Validate the consistency of the remote state data structure. Either return true if no error is found, or raise a RuntimeError. The following checks are performed: * all repositories referenced by teams should be known * all users referenced by teams should be known * all users referenced by repositories should be known
src/ghaudit/schema.py
validate
DistantThunder/ghaudit
1
python
def validate(rstate: Rstate) -> bool: 'Validate the consistency of the remote state data structure.\n\n Either return true if no error is found, or raise a RuntimeError. The\n following checks are performed:\n\n * all repositories referenced by teams should be known\n * all users referenced by teams should be known\n * all users referenced by repositories should be known\n ' for repo in org_repositories(rstate): for missing_login in missing_collaborators(rstate, repo): msg = 'unknown users "{}" referenced as collaborators of "{}"' raise RuntimeError(msg.format(missing_login, repo_name(repo))) for team in org_teams(rstate): for missing_id in _missing_repositories(rstate, team): msg = 'unknown repositories referenced by ID "{}" in team "{}"' raise RuntimeError(msg.format(missing_id, team_name(team))) for missing_id in _missing_members(rstate, team): msg = 'unknown repositories referenced by ID "{}" in team "{}"' raise RuntimeError(msg.format(missing_id, team_name(team))) return True
def validate(rstate: Rstate) -> bool: 'Validate the consistency of the remote state data structure.\n\n Either return true if no error is found, or raise a RuntimeError. The\n following checks are performed:\n\n * all repositories referenced by teams should be known\n * all users referenced by teams should be known\n * all users referenced by repositories should be known\n ' for repo in org_repositories(rstate): for missing_login in missing_collaborators(rstate, repo): msg = 'unknown users "{}" referenced as collaborators of "{}"' raise RuntimeError(msg.format(missing_login, repo_name(repo))) for team in org_teams(rstate): for missing_id in _missing_repositories(rstate, team): msg = 'unknown repositories referenced by ID "{}" in team "{}"' raise RuntimeError(msg.format(missing_id, team_name(team))) for missing_id in _missing_members(rstate, team): msg = 'unknown repositories referenced by ID "{}" in team "{}"' raise RuntimeError(msg.format(missing_id, team_name(team))) return True<|docstring|>Validate the consistency of the remote state data structure. Either return true if no error is found, or raise a RuntimeError. The following checks are performed: * all repositories referenced by teams should be known * all users referenced by teams should be known * all users referenced by repositories should be known<|endoftext|>
cc04d57a6f1f6e6c2fa1143c62783d2fae30f6a6abe1dd30f4de67a32586087a
def GetGestureRecognitionResults(self): '\n GetGestureRecognitionResults(self: InkCanvasGestureEventArgs) -> ReadOnlyCollection[GestureRecognitionResult]\n\n \n\n Returns results from the gesture recognizer.\n\n Returns: A collection of possible application gestures that the \n\n System.Windows.Controls.InkCanvasGestureEventArgs.Strokes might be.\n ' pass
GetGestureRecognitionResults(self: InkCanvasGestureEventArgs) -> ReadOnlyCollection[GestureRecognitionResult] Returns results from the gesture recognizer. Returns: A collection of possible application gestures that the System.Windows.Controls.InkCanvasGestureEventArgs.Strokes might be.
release/stubs.min/System/Windows/Controls/__init___parts/InkCanvasGestureEventArgs.py
GetGestureRecognitionResults
htlcnn/ironpython-stubs
182
python
def GetGestureRecognitionResults(self): '\n GetGestureRecognitionResults(self: InkCanvasGestureEventArgs) -> ReadOnlyCollection[GestureRecognitionResult]\n\n \n\n Returns results from the gesture recognizer.\n\n Returns: A collection of possible application gestures that the \n\n System.Windows.Controls.InkCanvasGestureEventArgs.Strokes might be.\n ' pass
def GetGestureRecognitionResults(self): '\n GetGestureRecognitionResults(self: InkCanvasGestureEventArgs) -> ReadOnlyCollection[GestureRecognitionResult]\n\n \n\n Returns results from the gesture recognizer.\n\n Returns: A collection of possible application gestures that the \n\n System.Windows.Controls.InkCanvasGestureEventArgs.Strokes might be.\n ' pass<|docstring|>GetGestureRecognitionResults(self: InkCanvasGestureEventArgs) -> ReadOnlyCollection[GestureRecognitionResult] Returns results from the gesture recognizer. Returns: A collection of possible application gestures that the System.Windows.Controls.InkCanvasGestureEventArgs.Strokes might be.<|endoftext|>
397e9acee46fd281b92e4f952fd76f42a1737f347603a5825806a67e3506c411
@staticmethod def __new__(self, strokes, gestureRecognitionResults): ' __new__(cls: type,strokes: StrokeCollection,gestureRecognitionResults: IEnumerable[GestureRecognitionResult]) ' pass
__new__(cls: type,strokes: StrokeCollection,gestureRecognitionResults: IEnumerable[GestureRecognitionResult])
release/stubs.min/System/Windows/Controls/__init___parts/InkCanvasGestureEventArgs.py
__new__
htlcnn/ironpython-stubs
182
python
@staticmethod def __new__(self, strokes, gestureRecognitionResults): ' ' pass
@staticmethod def __new__(self, strokes, gestureRecognitionResults): ' ' pass<|docstring|>__new__(cls: type,strokes: StrokeCollection,gestureRecognitionResults: IEnumerable[GestureRecognitionResult])<|endoftext|>
1b575a491ef5fb53bb4d661142a1bd19bf60b126ceaa7139d032928e8488e8ad
def save_settings(self): 'saves settings inside objects oyPickerData attribute\n ' self.create_data_attribute() pm.connectAttr((self._constrained_parent.name() + '.message'), self._object.attr('pickedData.constrainedParent'), f=True) pm.connectAttr((self._stabilizer_parent.name() + '.message'), self._object.attr('pickedData.stabilizerParent'), f=True) pm.connectAttr((self._parent_constraint.name() + '.message'), self._object.attr('pickedData.parentConstraint'), f=True) pm.connectAttr((self._local_parent.name() + '.message'), self._object.attr('pickedData.localParent'), f=True) tra = self._stabilizer_parent.getAttr('t') rot = self._stabilizer_parent.getAttr('r') sca = self._stabilizer_parent.getAttr('s') self._object.setAttr('pickedData.stabilizerParentInitialData.sPIDposition', tra) self._object.setAttr('pickedData.stabilizerParentInitialData.sPIDrotation', rot) self._object.setAttr('pickedData.stabilizerParentInitialData.sPIDscale', sca)
saves settings inside objects oyPickerData attribute
anima/env/mayaEnv/picker.py
save_settings
MehmetErer/anima
101
python
def save_settings(self): '\n ' self.create_data_attribute() pm.connectAttr((self._constrained_parent.name() + '.message'), self._object.attr('pickedData.constrainedParent'), f=True) pm.connectAttr((self._stabilizer_parent.name() + '.message'), self._object.attr('pickedData.stabilizerParent'), f=True) pm.connectAttr((self._parent_constraint.name() + '.message'), self._object.attr('pickedData.parentConstraint'), f=True) pm.connectAttr((self._local_parent.name() + '.message'), self._object.attr('pickedData.localParent'), f=True) tra = self._stabilizer_parent.getAttr('t') rot = self._stabilizer_parent.getAttr('r') sca = self._stabilizer_parent.getAttr('s') self._object.setAttr('pickedData.stabilizerParentInitialData.sPIDposition', tra) self._object.setAttr('pickedData.stabilizerParentInitialData.sPIDrotation', rot) self._object.setAttr('pickedData.stabilizerParentInitialData.sPIDscale', sca)
def save_settings(self): '\n ' self.create_data_attribute() pm.connectAttr((self._constrained_parent.name() + '.message'), self._object.attr('pickedData.constrainedParent'), f=True) pm.connectAttr((self._stabilizer_parent.name() + '.message'), self._object.attr('pickedData.stabilizerParent'), f=True) pm.connectAttr((self._parent_constraint.name() + '.message'), self._object.attr('pickedData.parentConstraint'), f=True) pm.connectAttr((self._local_parent.name() + '.message'), self._object.attr('pickedData.localParent'), f=True) tra = self._stabilizer_parent.getAttr('t') rot = self._stabilizer_parent.getAttr('r') sca = self._stabilizer_parent.getAttr('s') self._object.setAttr('pickedData.stabilizerParentInitialData.sPIDposition', tra) self._object.setAttr('pickedData.stabilizerParentInitialData.sPIDrotation', rot) self._object.setAttr('pickedData.stabilizerParentInitialData.sPIDscale', sca)<|docstring|>saves settings inside objects oyPickerData attribute<|endoftext|>
152fd6ae8eb99035bd5f7d0986df3ff0a647e8bd11e47a7148343055b51c1cff
def read_settings(self): 'reads settings from objects pickedData attribute\n\n if there is no attribute to read it returns False\n ' if self._object.hasAttr('pickedData'): self._stabilizer_parent = pm.nodetypes.DagNode(pm.listConnections(self._object.attr('pickedData.stabilizerParent'))[0]) self._constrained_parent = pm.nodetypes.DagNode(pm.listConnections(self._object.attr('pickedData.constrainedParent'))[0]) self._parent_constraint = pm.nodetypes.DagNode(pm.listConnections(self._object.attr('pickedData.parentConstraint'))[0]) self._local_parent = pm.nodetypes.DagNode(pm.listConnections(self._object.attr('pickedData.localParent'))[0]) self._is_setup = True return True return False
reads settings from objects pickedData attribute if there is no attribute to read it returns False
anima/env/mayaEnv/picker.py
read_settings
MehmetErer/anima
101
python
def read_settings(self): 'reads settings from objects pickedData attribute\n\n if there is no attribute to read it returns False\n ' if self._object.hasAttr('pickedData'): self._stabilizer_parent = pm.nodetypes.DagNode(pm.listConnections(self._object.attr('pickedData.stabilizerParent'))[0]) self._constrained_parent = pm.nodetypes.DagNode(pm.listConnections(self._object.attr('pickedData.constrainedParent'))[0]) self._parent_constraint = pm.nodetypes.DagNode(pm.listConnections(self._object.attr('pickedData.parentConstraint'))[0]) self._local_parent = pm.nodetypes.DagNode(pm.listConnections(self._object.attr('pickedData.localParent'))[0]) self._is_setup = True return True return False
def read_settings(self): 'reads settings from objects pickedData attribute\n\n if there is no attribute to read it returns False\n ' if self._object.hasAttr('pickedData'): self._stabilizer_parent = pm.nodetypes.DagNode(pm.listConnections(self._object.attr('pickedData.stabilizerParent'))[0]) self._constrained_parent = pm.nodetypes.DagNode(pm.listConnections(self._object.attr('pickedData.constrainedParent'))[0]) self._parent_constraint = pm.nodetypes.DagNode(pm.listConnections(self._object.attr('pickedData.parentConstraint'))[0]) self._local_parent = pm.nodetypes.DagNode(pm.listConnections(self._object.attr('pickedData.localParent'))[0]) self._is_setup = True return True return False<|docstring|>reads settings from objects pickedData attribute if there is no attribute to read it returns False<|endoftext|>
53b1672579ec18357b86412df1b397f3ae49cebb33c4db97d742fa0481098a87
def create_data_attribute(self): 'creates attribute in self._object to hold the rawData\n ' if (not self._object.hasAttr('pickedData')): pm.addAttr(self._object, ln='pickedData', at='compound', nc=6) if (not self._object.hasAttr('constrainedParent')): pm.addAttr(self._object, ln='constrainedParent', at='message', p='pickedData') if (not self._object.hasAttr('stabilizerParent')): pm.addAttr(self._object, ln='stabilizerParent', at='message', p='pickedData') if (not self._object.hasAttr('parentConstraint')): pm.addAttr(self._object, ln='parentConstraint', at='message', p='pickedData') if (not self._object.hasAttr('localParent')): pm.addAttr(self._object, ln='localParent', at='message', p='pickedData') if (not self._object.hasAttr('createdNodes')): pm.addAttr(self._object, ln='createdNodes', at='message', m=1, p='pickedData') if (not self._object.hasAttr('stabilizerParentInitialData')): pm.addAttr(self._object, ln='stabilizerParentInitialData', at='compound', nc=3, p='pickedData') pm.addAttr(self._object, ln='sPIDposition', at='compound', nc=3, p='stabilizerParentInitialData') pm.addAttr(self._object, ln='sPIDpositionX', at='float', p='sPIDposition') pm.addAttr(self._object, ln='sPIDpositionY', at='float', p='sPIDposition') pm.addAttr(self._object, ln='sPIDpositionZ', at='float', p='sPIDposition') pm.addAttr(self._object, ln='sPIDrotation', at='compound', nc=3, p='stabilizerParentInitialData') pm.addAttr(self._object, ln='sPIDrotationX', at='float', p='sPIDrotation') pm.addAttr(self._object, ln='sPIDrotationY', at='float', p='sPIDrotation') pm.addAttr(self._object, ln='sPIDrotationZ', at='float', p='sPIDrotation') pm.addAttr(self._object, ln='sPIDscale', at='compound', nc=3, p='stabilizerParentInitialData') pm.addAttr(self._object, ln='sPIDscaleX', at='float', p='sPIDscale') pm.addAttr(self._object, ln='sPIDscaleY', at='float', p='sPIDscale') pm.addAttr(self._object, ln='sPIDscaleZ', at='float', p='sPIDscale')
creates attribute in self._object to hold the rawData
anima/env/mayaEnv/picker.py
create_data_attribute
MehmetErer/anima
101
python
def create_data_attribute(self): '\n ' if (not self._object.hasAttr('pickedData')): pm.addAttr(self._object, ln='pickedData', at='compound', nc=6) if (not self._object.hasAttr('constrainedParent')): pm.addAttr(self._object, ln='constrainedParent', at='message', p='pickedData') if (not self._object.hasAttr('stabilizerParent')): pm.addAttr(self._object, ln='stabilizerParent', at='message', p='pickedData') if (not self._object.hasAttr('parentConstraint')): pm.addAttr(self._object, ln='parentConstraint', at='message', p='pickedData') if (not self._object.hasAttr('localParent')): pm.addAttr(self._object, ln='localParent', at='message', p='pickedData') if (not self._object.hasAttr('createdNodes')): pm.addAttr(self._object, ln='createdNodes', at='message', m=1, p='pickedData') if (not self._object.hasAttr('stabilizerParentInitialData')): pm.addAttr(self._object, ln='stabilizerParentInitialData', at='compound', nc=3, p='pickedData') pm.addAttr(self._object, ln='sPIDposition', at='compound', nc=3, p='stabilizerParentInitialData') pm.addAttr(self._object, ln='sPIDpositionX', at='float', p='sPIDposition') pm.addAttr(self._object, ln='sPIDpositionY', at='float', p='sPIDposition') pm.addAttr(self._object, ln='sPIDpositionZ', at='float', p='sPIDposition') pm.addAttr(self._object, ln='sPIDrotation', at='compound', nc=3, p='stabilizerParentInitialData') pm.addAttr(self._object, ln='sPIDrotationX', at='float', p='sPIDrotation') pm.addAttr(self._object, ln='sPIDrotationY', at='float', p='sPIDrotation') pm.addAttr(self._object, ln='sPIDrotationZ', at='float', p='sPIDrotation') pm.addAttr(self._object, ln='sPIDscale', at='compound', nc=3, p='stabilizerParentInitialData') pm.addAttr(self._object, ln='sPIDscaleX', at='float', p='sPIDscale') pm.addAttr(self._object, ln='sPIDscaleY', at='float', p='sPIDscale') pm.addAttr(self._object, ln='sPIDscaleZ', at='float', p='sPIDscale')
def create_data_attribute(self): '\n ' if (not self._object.hasAttr('pickedData')): pm.addAttr(self._object, ln='pickedData', at='compound', nc=6) if (not self._object.hasAttr('constrainedParent')): pm.addAttr(self._object, ln='constrainedParent', at='message', p='pickedData') if (not self._object.hasAttr('stabilizerParent')): pm.addAttr(self._object, ln='stabilizerParent', at='message', p='pickedData') if (not self._object.hasAttr('parentConstraint')): pm.addAttr(self._object, ln='parentConstraint', at='message', p='pickedData') if (not self._object.hasAttr('localParent')): pm.addAttr(self._object, ln='localParent', at='message', p='pickedData') if (not self._object.hasAttr('createdNodes')): pm.addAttr(self._object, ln='createdNodes', at='message', m=1, p='pickedData') if (not self._object.hasAttr('stabilizerParentInitialData')): pm.addAttr(self._object, ln='stabilizerParentInitialData', at='compound', nc=3, p='pickedData') pm.addAttr(self._object, ln='sPIDposition', at='compound', nc=3, p='stabilizerParentInitialData') pm.addAttr(self._object, ln='sPIDpositionX', at='float', p='sPIDposition') pm.addAttr(self._object, ln='sPIDpositionY', at='float', p='sPIDposition') pm.addAttr(self._object, ln='sPIDpositionZ', at='float', p='sPIDposition') pm.addAttr(self._object, ln='sPIDrotation', at='compound', nc=3, p='stabilizerParentInitialData') pm.addAttr(self._object, ln='sPIDrotationX', at='float', p='sPIDrotation') pm.addAttr(self._object, ln='sPIDrotationY', at='float', p='sPIDrotation') pm.addAttr(self._object, ln='sPIDrotationZ', at='float', p='sPIDrotation') pm.addAttr(self._object, ln='sPIDscale', at='compound', nc=3, p='stabilizerParentInitialData') pm.addAttr(self._object, ln='sPIDscaleX', at='float', p='sPIDscale') pm.addAttr(self._object, ln='sPIDscaleY', at='float', p='sPIDscale') pm.addAttr(self._object, ln='sPIDscaleZ', at='float', p='sPIDscale')<|docstring|>creates attribute in self._object to hold the rawData<|endoftext|>
6eb79d763f1464e1c527a6ddb522f1daca52623fa36f01fcbcab49c1831ad3a1
def setup_to_be_picked_up(self): 'setups specified object for pick/release sequence\n ' if self._is_setup: return else: self._is_setup = True is_ref = self.is_referenced(self._object) parents = self.get_parents(self._object) parent_cnt = len(parents) self.create_data_attribute() if is_ref: if (parent_cnt == 0): self.create_stabilizer_parent() self.create_constrained_parent() elif (parent_cnt == 1): self._stabilizer_parent = pm.nodetypes.DagNode(parents[0]) self.create_constrained_parent() elif (parent_cnt >= 2): self._stabilizer_parent = pm.nodetypes.DagNode(parents[0]) self._constrained_parent = pm.nodetypes.DagNode(parents[1]) else: self.create_stabilizer_parent() self.create_constrained_parent() self.create_local_parent() self.create_parent_constraint() self.set_keyframe_colors() self.save_settings()
setups specified object for pick/release sequence
anima/env/mayaEnv/picker.py
setup_to_be_picked_up
MehmetErer/anima
101
python
def setup_to_be_picked_up(self): '\n ' if self._is_setup: return else: self._is_setup = True is_ref = self.is_referenced(self._object) parents = self.get_parents(self._object) parent_cnt = len(parents) self.create_data_attribute() if is_ref: if (parent_cnt == 0): self.create_stabilizer_parent() self.create_constrained_parent() elif (parent_cnt == 1): self._stabilizer_parent = pm.nodetypes.DagNode(parents[0]) self.create_constrained_parent() elif (parent_cnt >= 2): self._stabilizer_parent = pm.nodetypes.DagNode(parents[0]) self._constrained_parent = pm.nodetypes.DagNode(parents[1]) else: self.create_stabilizer_parent() self.create_constrained_parent() self.create_local_parent() self.create_parent_constraint() self.set_keyframe_colors() self.save_settings()
def setup_to_be_picked_up(self): '\n ' if self._is_setup: return else: self._is_setup = True is_ref = self.is_referenced(self._object) parents = self.get_parents(self._object) parent_cnt = len(parents) self.create_data_attribute() if is_ref: if (parent_cnt == 0): self.create_stabilizer_parent() self.create_constrained_parent() elif (parent_cnt == 1): self._stabilizer_parent = pm.nodetypes.DagNode(parents[0]) self.create_constrained_parent() elif (parent_cnt >= 2): self._stabilizer_parent = pm.nodetypes.DagNode(parents[0]) self._constrained_parent = pm.nodetypes.DagNode(parents[1]) else: self.create_stabilizer_parent() self.create_constrained_parent() self.create_local_parent() self.create_parent_constraint() self.set_keyframe_colors() self.save_settings()<|docstring|>setups specified object for pick/release sequence<|endoftext|>
39aeb672a4930d44819d4526feb6de8476e6e7b7ebc95751443ea298a41dd65d
def explode_setup(self): 'breaks all the setup objects\n ' if (not self._is_setup): return pm.delete(self._parent_constraint) pm.delete(self._stabilizer_parent.attr('tx').inputs(), self._stabilizer_parent.attr('ty').inputs(), self._stabilizer_parent.attr('tz').inputs(), self._stabilizer_parent.attr('rx').inputs(), self._stabilizer_parent.attr('ry').inputs(), self._stabilizer_parent.attr('rz').inputs(), self._stabilizer_parent.attr('sx').inputs(), self._stabilizer_parent.attr('sy').inputs(), self._stabilizer_parent.attr('sz').inputs()) if self._object.hasAttr('stabilizerParentInitialData'): self._stabilizer_parent.setAttr('tx', self._object.getAttr('pickedData.stabilizerParentInitialData.sPIDposition.sPIDpositionX')) self._stabilizer_parent.setAttr('ty', self._object.getAttr('pickedData.stabilizerParentInitialData.sPIDposition.sPIDpositionY')) self._stabilizer_parent.setAttr('tz', self._object.getAttr('pickedData.stabilizerParentInitialData.sPIDposition.sPIDpositionZ')) self._stabilizer_parent.setAttr('rx', self._object.getAttr('pickedData.stabilizerParentInitialData.sPIDrotation.sPIDrotationX')) self._stabilizer_parent.setAttr('ry', self._object.getAttr('pickedData.stabilizerParentInitialData.sPIDrotation.sPIDrotationY')) self._stabilizer_parent.setAttr('rz', self._object.getAttr('pickedData.stabilizerParentInitialData.sPIDrotation.sPIDrotationZ')) self._stabilizer_parent.setAttr('sx', self._object.getAttr('pickedData.stabilizerParentInitialData.sPIDscale.sPIDscaleX')) self._stabilizer_parent.setAttr('sy', self._object.getAttr('pickedData.stabilizerParentInitialData.sPIDscale.sPIDscaleY')) self._stabilizer_parent.setAttr('sz', self._object.getAttr('pickedData.stabilizerParentInitialData.sPIDscale.sPIDscaleZ')) if self._object.hasAttr('pickedData.createdNodes'): object_to_parent = None parent_obj = None nodes_to_delete = self._object.attr('pickedData.createdNodes').inputs() if (self._constrained_parent in nodes_to_delete): object_to_parent = self._stabilizer_parent parent_obj = self._constrained_parent.getParent() if (self._stabilizer_parent in nodes_to_delete): object_to_parent = self._object if (object_to_parent is not None): if (parent_obj is not None): pm.parent(object_to_parent, parent_obj) else: pm.parent(object_to_parent, w=True) pm.delete(nodes_to_delete) self._object.attr('pickedData').delete() self._object.attr('specialCommands').delete() self._object.attr('specialCommandLabels').delete()
breaks all the setup objects
anima/env/mayaEnv/picker.py
explode_setup
MehmetErer/anima
101
python
def explode_setup(self): '\n ' if (not self._is_setup): return pm.delete(self._parent_constraint) pm.delete(self._stabilizer_parent.attr('tx').inputs(), self._stabilizer_parent.attr('ty').inputs(), self._stabilizer_parent.attr('tz').inputs(), self._stabilizer_parent.attr('rx').inputs(), self._stabilizer_parent.attr('ry').inputs(), self._stabilizer_parent.attr('rz').inputs(), self._stabilizer_parent.attr('sx').inputs(), self._stabilizer_parent.attr('sy').inputs(), self._stabilizer_parent.attr('sz').inputs()) if self._object.hasAttr('stabilizerParentInitialData'): self._stabilizer_parent.setAttr('tx', self._object.getAttr('pickedData.stabilizerParentInitialData.sPIDposition.sPIDpositionX')) self._stabilizer_parent.setAttr('ty', self._object.getAttr('pickedData.stabilizerParentInitialData.sPIDposition.sPIDpositionY')) self._stabilizer_parent.setAttr('tz', self._object.getAttr('pickedData.stabilizerParentInitialData.sPIDposition.sPIDpositionZ')) self._stabilizer_parent.setAttr('rx', self._object.getAttr('pickedData.stabilizerParentInitialData.sPIDrotation.sPIDrotationX')) self._stabilizer_parent.setAttr('ry', self._object.getAttr('pickedData.stabilizerParentInitialData.sPIDrotation.sPIDrotationY')) self._stabilizer_parent.setAttr('rz', self._object.getAttr('pickedData.stabilizerParentInitialData.sPIDrotation.sPIDrotationZ')) self._stabilizer_parent.setAttr('sx', self._object.getAttr('pickedData.stabilizerParentInitialData.sPIDscale.sPIDscaleX')) self._stabilizer_parent.setAttr('sy', self._object.getAttr('pickedData.stabilizerParentInitialData.sPIDscale.sPIDscaleY')) self._stabilizer_parent.setAttr('sz', self._object.getAttr('pickedData.stabilizerParentInitialData.sPIDscale.sPIDscaleZ')) if self._object.hasAttr('pickedData.createdNodes'): object_to_parent = None parent_obj = None nodes_to_delete = self._object.attr('pickedData.createdNodes').inputs() if (self._constrained_parent in nodes_to_delete): object_to_parent = self._stabilizer_parent parent_obj = self._constrained_parent.getParent() if (self._stabilizer_parent in nodes_to_delete): object_to_parent = self._object if (object_to_parent is not None): if (parent_obj is not None): pm.parent(object_to_parent, parent_obj) else: pm.parent(object_to_parent, w=True) pm.delete(nodes_to_delete) self._object.attr('pickedData').delete() self._object.attr('specialCommands').delete() self._object.attr('specialCommandLabels').delete()
def explode_setup(self): '\n ' if (not self._is_setup): return pm.delete(self._parent_constraint) pm.delete(self._stabilizer_parent.attr('tx').inputs(), self._stabilizer_parent.attr('ty').inputs(), self._stabilizer_parent.attr('tz').inputs(), self._stabilizer_parent.attr('rx').inputs(), self._stabilizer_parent.attr('ry').inputs(), self._stabilizer_parent.attr('rz').inputs(), self._stabilizer_parent.attr('sx').inputs(), self._stabilizer_parent.attr('sy').inputs(), self._stabilizer_parent.attr('sz').inputs()) if self._object.hasAttr('stabilizerParentInitialData'): self._stabilizer_parent.setAttr('tx', self._object.getAttr('pickedData.stabilizerParentInitialData.sPIDposition.sPIDpositionX')) self._stabilizer_parent.setAttr('ty', self._object.getAttr('pickedData.stabilizerParentInitialData.sPIDposition.sPIDpositionY')) self._stabilizer_parent.setAttr('tz', self._object.getAttr('pickedData.stabilizerParentInitialData.sPIDposition.sPIDpositionZ')) self._stabilizer_parent.setAttr('rx', self._object.getAttr('pickedData.stabilizerParentInitialData.sPIDrotation.sPIDrotationX')) self._stabilizer_parent.setAttr('ry', self._object.getAttr('pickedData.stabilizerParentInitialData.sPIDrotation.sPIDrotationY')) self._stabilizer_parent.setAttr('rz', self._object.getAttr('pickedData.stabilizerParentInitialData.sPIDrotation.sPIDrotationZ')) self._stabilizer_parent.setAttr('sx', self._object.getAttr('pickedData.stabilizerParentInitialData.sPIDscale.sPIDscaleX')) self._stabilizer_parent.setAttr('sy', self._object.getAttr('pickedData.stabilizerParentInitialData.sPIDscale.sPIDscaleY')) self._stabilizer_parent.setAttr('sz', self._object.getAttr('pickedData.stabilizerParentInitialData.sPIDscale.sPIDscaleZ')) if self._object.hasAttr('pickedData.createdNodes'): object_to_parent = None parent_obj = None nodes_to_delete = self._object.attr('pickedData.createdNodes').inputs() if (self._constrained_parent in nodes_to_delete): object_to_parent = self._stabilizer_parent parent_obj = self._constrained_parent.getParent() if (self._stabilizer_parent in nodes_to_delete): object_to_parent = self._object if (object_to_parent is not None): if (parent_obj is not None): pm.parent(object_to_parent, parent_obj) else: pm.parent(object_to_parent, w=True) pm.delete(nodes_to_delete) self._object.attr('pickedData').delete() self._object.attr('specialCommands').delete() self._object.attr('specialCommandLabels').delete()<|docstring|>breaks all the setup objects<|endoftext|>
821bdd6934ed7f161d1fc68ca5d7731e38824ca249df1b237cfebe2520aadcf9
def create_local_parent(self): 'creates local parent and axial correction group of local parent\n ' self._local_parent = pm.group(em=True, n=(self._object.name() + '_local_parent')) matrix = pm.xform(self._constrained_parent, q=True, ws=True, m=True) pm.xform(self._local_parent, ws=True, m=matrix) parents = pm.listRelatives(self._constrained_parent, p=True) if (len(parents) != 0): temp = pm.parent(self._local_parent, parents[0], a=True) self._local_parent = temp[0] self._local_parent = pm.nodetypes.DagNode(self._local_parent) index = self._object.attr('pickedData.createdNodes').numElements() (self._local_parent.attr('message') >> self._object.attr((('pickedData.createdNodes[' + str(index)) + ']')))
creates local parent and axial correction group of local parent
anima/env/mayaEnv/picker.py
create_local_parent
MehmetErer/anima
101
python
def create_local_parent(self): '\n ' self._local_parent = pm.group(em=True, n=(self._object.name() + '_local_parent')) matrix = pm.xform(self._constrained_parent, q=True, ws=True, m=True) pm.xform(self._local_parent, ws=True, m=matrix) parents = pm.listRelatives(self._constrained_parent, p=True) if (len(parents) != 0): temp = pm.parent(self._local_parent, parents[0], a=True) self._local_parent = temp[0] self._local_parent = pm.nodetypes.DagNode(self._local_parent) index = self._object.attr('pickedData.createdNodes').numElements() (self._local_parent.attr('message') >> self._object.attr((('pickedData.createdNodes[' + str(index)) + ']')))
def create_local_parent(self): '\n ' self._local_parent = pm.group(em=True, n=(self._object.name() + '_local_parent')) matrix = pm.xform(self._constrained_parent, q=True, ws=True, m=True) pm.xform(self._local_parent, ws=True, m=matrix) parents = pm.listRelatives(self._constrained_parent, p=True) if (len(parents) != 0): temp = pm.parent(self._local_parent, parents[0], a=True) self._local_parent = temp[0] self._local_parent = pm.nodetypes.DagNode(self._local_parent) index = self._object.attr('pickedData.createdNodes').numElements() (self._local_parent.attr('message') >> self._object.attr((('pickedData.createdNodes[' + str(index)) + ']')))<|docstring|>creates local parent and axial correction group of local parent<|endoftext|>
9cb1941a9bf7c7c395c2d591544a50a18c7d55f72b9e5ff6c57dfe4c9d66209e
def create_parent_constraint(self): 'creates parentConstraint between _local_parent and the\n _constrained_parent\n ' self._parent_constraint = pm.parentConstraint(self._local_parent, self._constrained_parent, w=1) weight_alias = self.get_weight_alias(self._local_parent) frame = 0 weight_alias.setKey(t=frame, ott='step') self.set_stabilizer_keyframe(frame)
creates parentConstraint between _local_parent and the _constrained_parent
anima/env/mayaEnv/picker.py
create_parent_constraint
MehmetErer/anima
101
python
def create_parent_constraint(self): 'creates parentConstraint between _local_parent and the\n _constrained_parent\n ' self._parent_constraint = pm.parentConstraint(self._local_parent, self._constrained_parent, w=1) weight_alias = self.get_weight_alias(self._local_parent) frame = 0 weight_alias.setKey(t=frame, ott='step') self.set_stabilizer_keyframe(frame)
def create_parent_constraint(self): 'creates parentConstraint between _local_parent and the\n _constrained_parent\n ' self._parent_constraint = pm.parentConstraint(self._local_parent, self._constrained_parent, w=1) weight_alias = self.get_weight_alias(self._local_parent) frame = 0 weight_alias.setKey(t=frame, ott='step') self.set_stabilizer_keyframe(frame)<|docstring|>creates parentConstraint between _local_parent and the _constrained_parent<|endoftext|>
144f9cc992d0c2643251f534a874b09588bf8c6ad02f7083d0e09dadfbfa74e0
def set_stabilizer_keyframe(self, frame): 'sets keyframe for stabilizer at the current values\n ' self._stabilizer_parent.attr('tx').setKey(t=frame, ott='step') self._stabilizer_parent.attr('ty').setKey(t=frame, ott='step') self._stabilizer_parent.attr('tz').setKey(t=frame, ott='step') self._stabilizer_parent.attr('rx').setKey(t=frame, ott='step') self._stabilizer_parent.attr('ry').setKey(t=frame, ott='step') self._stabilizer_parent.attr('rz').setKey(t=frame, ott='step') self._stabilizer_parent.attr('sx').setKey(t=frame, ott='step') self._stabilizer_parent.attr('sy').setKey(t=frame, ott='step') self._stabilizer_parent.attr('sz').setKey(t=frame, ott='step')
sets keyframe for stabilizer at the current values
anima/env/mayaEnv/picker.py
set_stabilizer_keyframe
MehmetErer/anima
101
python
def set_stabilizer_keyframe(self, frame): '\n ' self._stabilizer_parent.attr('tx').setKey(t=frame, ott='step') self._stabilizer_parent.attr('ty').setKey(t=frame, ott='step') self._stabilizer_parent.attr('tz').setKey(t=frame, ott='step') self._stabilizer_parent.attr('rx').setKey(t=frame, ott='step') self._stabilizer_parent.attr('ry').setKey(t=frame, ott='step') self._stabilizer_parent.attr('rz').setKey(t=frame, ott='step') self._stabilizer_parent.attr('sx').setKey(t=frame, ott='step') self._stabilizer_parent.attr('sy').setKey(t=frame, ott='step') self._stabilizer_parent.attr('sz').setKey(t=frame, ott='step')
def set_stabilizer_keyframe(self, frame): '\n ' self._stabilizer_parent.attr('tx').setKey(t=frame, ott='step') self._stabilizer_parent.attr('ty').setKey(t=frame, ott='step') self._stabilizer_parent.attr('tz').setKey(t=frame, ott='step') self._stabilizer_parent.attr('rx').setKey(t=frame, ott='step') self._stabilizer_parent.attr('ry').setKey(t=frame, ott='step') self._stabilizer_parent.attr('rz').setKey(t=frame, ott='step') self._stabilizer_parent.attr('sx').setKey(t=frame, ott='step') self._stabilizer_parent.attr('sy').setKey(t=frame, ott='step') self._stabilizer_parent.attr('sz').setKey(t=frame, ott='step')<|docstring|>sets keyframe for stabilizer at the current values<|endoftext|>
8b4a023c15166f93e662290e56358254aa40ec18111498f3e7d79fa91d102b5c
def get_parents(self, node): 'returns hierarchical parents of the given node\n ' node = pm.nodetypes.DagNode(node) parents = node.getParent(generations=None) if (parents is None): parents = [] return parents
returns hierarchical parents of the given node
anima/env/mayaEnv/picker.py
get_parents
MehmetErer/anima
101
python
def get_parents(self, node): '\n ' node = pm.nodetypes.DagNode(node) parents = node.getParent(generations=None) if (parents is None): parents = [] return parents
def get_parents(self, node): '\n ' node = pm.nodetypes.DagNode(node) parents = node.getParent(generations=None) if (parents is None): parents = [] return parents<|docstring|>returns hierarchical parents of the given node<|endoftext|>
3ea55d960ac4b1b417311019815c52db52e0f2c9605250901a38ea8bd5f9955e
def is_referenced(self, node): 'checks if the node is referenced\n\n returns True or False\n ' node = pm.nodetypes.DagNode(node) return node.isReferenced()
checks if the node is referenced returns True or False
anima/env/mayaEnv/picker.py
is_referenced
MehmetErer/anima
101
python
def is_referenced(self, node): 'checks if the node is referenced\n\n returns True or False\n ' node = pm.nodetypes.DagNode(node) return node.isReferenced()
def is_referenced(self, node): 'checks if the node is referenced\n\n returns True or False\n ' node = pm.nodetypes.DagNode(node) return node.isReferenced()<|docstring|>checks if the node is referenced returns True or False<|endoftext|>
6228b309a4ec7317e1c18161d4e96bf60488a0e7ff4811e63826503964a881b1
def create_stabilizer_parent(self): 'creates the stabilizer parent\n ' self._stabilizer_parent = pm.nodetypes.DagNode(auxiliary.axial_correction_group(self._object, to_parents_origin=True)) self._stabilizer_parent = pm.nodetypes.DagNode(pm.rename(self._stabilizer_parent, (self._object.name() + '_stabilizer_parent'))) index = self._object.attr('pickedData.createdNodes').numElements() (self._stabilizer_parent.attr('message') >> self._object.attr((('pickedData.createdNodes[' + str(index)) + ']')))
creates the stabilizer parent
anima/env/mayaEnv/picker.py
create_stabilizer_parent
MehmetErer/anima
101
python
def create_stabilizer_parent(self): '\n ' self._stabilizer_parent = pm.nodetypes.DagNode(auxiliary.axial_correction_group(self._object, to_parents_origin=True)) self._stabilizer_parent = pm.nodetypes.DagNode(pm.rename(self._stabilizer_parent, (self._object.name() + '_stabilizer_parent'))) index = self._object.attr('pickedData.createdNodes').numElements() (self._stabilizer_parent.attr('message') >> self._object.attr((('pickedData.createdNodes[' + str(index)) + ']')))
def create_stabilizer_parent(self): '\n ' self._stabilizer_parent = pm.nodetypes.DagNode(auxiliary.axial_correction_group(self._object, to_parents_origin=True)) self._stabilizer_parent = pm.nodetypes.DagNode(pm.rename(self._stabilizer_parent, (self._object.name() + '_stabilizer_parent'))) index = self._object.attr('pickedData.createdNodes').numElements() (self._stabilizer_parent.attr('message') >> self._object.attr((('pickedData.createdNodes[' + str(index)) + ']')))<|docstring|>creates the stabilizer parent<|endoftext|>
86810bdddedcebefcf602b07a724369c8f766db26136462ab32dc499457dd91a
def create_constrained_parent(self): 'creates parents for the object\n ' try: pm.nodetypes.DagNode(self._stabilizer_parent) except pm.MayaNodeError: return self._constrained_parent = pm.nodetypes.DagNode(auxiliary.axial_correction_group(self._stabilizer_parent)) self._constrained_parent = pm.nodetypes.DagNode(pm.rename(self._constrained_parent, (self._object.name() + '_constrained_parent'))) index = self._object.attr('pickedData.createdNodes').numElements() (self._constrained_parent.attr('message') >> self._object.attr((('pickedData.createdNodes[' + str(index)) + ']')))
creates parents for the object
anima/env/mayaEnv/picker.py
create_constrained_parent
MehmetErer/anima
101
python
def create_constrained_parent(self): '\n ' try: pm.nodetypes.DagNode(self._stabilizer_parent) except pm.MayaNodeError: return self._constrained_parent = pm.nodetypes.DagNode(auxiliary.axial_correction_group(self._stabilizer_parent)) self._constrained_parent = pm.nodetypes.DagNode(pm.rename(self._constrained_parent, (self._object.name() + '_constrained_parent'))) index = self._object.attr('pickedData.createdNodes').numElements() (self._constrained_parent.attr('message') >> self._object.attr((('pickedData.createdNodes[' + str(index)) + ']')))
def create_constrained_parent(self): '\n ' try: pm.nodetypes.DagNode(self._stabilizer_parent) except pm.MayaNodeError: return self._constrained_parent = pm.nodetypes.DagNode(auxiliary.axial_correction_group(self._stabilizer_parent)) self._constrained_parent = pm.nodetypes.DagNode(pm.rename(self._constrained_parent, (self._object.name() + '_constrained_parent'))) index = self._object.attr('pickedData.createdNodes').numElements() (self._constrained_parent.attr('message') >> self._object.attr((('pickedData.createdNodes[' + str(index)) + ']')))<|docstring|>creates parents for the object<|endoftext|>
8ec4ee0f82fe1c67c5c94c679f58bbdd79cc9e3e3b57f76c34d775c3c5ae84d0
def get_weight_alias_list(self): 'returns weight alias list\n ' if (not self._is_setup): return return self._parent_constraint.getWeightAliasList()
returns weight alias list
anima/env/mayaEnv/picker.py
get_weight_alias_list
MehmetErer/anima
101
python
def get_weight_alias_list(self): '\n ' if (not self._is_setup): return return self._parent_constraint.getWeightAliasList()
def get_weight_alias_list(self): '\n ' if (not self._is_setup): return return self._parent_constraint.getWeightAliasList()<|docstring|>returns weight alias list<|endoftext|>
9b07a492797c7e6871c3c337b23993ede1ce33811526748340f3f96709197b9e
def get_active_parent(self): 'returns the current parent\n ' if (not self._is_setup): return weight_aliases = self.get_weight_alias_list() parents = self.get_parent_list() for i in range(0, len(weight_aliases)): if (weight_aliases[i].get() >= 1): return parents[i] return None
returns the current parent
anima/env/mayaEnv/picker.py
get_active_parent
MehmetErer/anima
101
python
def get_active_parent(self): '\n ' if (not self._is_setup): return weight_aliases = self.get_weight_alias_list() parents = self.get_parent_list() for i in range(0, len(weight_aliases)): if (weight_aliases[i].get() >= 1): return parents[i] return None
def get_active_parent(self): '\n ' if (not self._is_setup): return weight_aliases = self.get_weight_alias_list() parents = self.get_parent_list() for i in range(0, len(weight_aliases)): if (weight_aliases[i].get() >= 1): return parents[i] return None<|docstring|>returns the current parent<|endoftext|>
9fc97ac0ac3ad1234f9f14d2dde44b6f50bfbc2a25d2b391c4feaad85ede774d
def get_parent_list(self): 'returns parent list\n ' if (not self._is_setup): return return self._parent_constraint.getTargetList()
returns parent list
anima/env/mayaEnv/picker.py
get_parent_list
MehmetErer/anima
101
python
def get_parent_list(self): '\n ' if (not self._is_setup): return return self._parent_constraint.getTargetList()
def get_parent_list(self): '\n ' if (not self._is_setup): return return self._parent_constraint.getTargetList()<|docstring|>returns parent list<|endoftext|>
0da4f38b40e7d679d5ce8b9076a2f3c7358b90a44ffee92d124d7702a0619613
def get_weight_alias(self, parent): "finds weightAlias of given parent\n\n if it couldn't find any it returns None\n " if (not self._is_setup): return parent = pm.nodetypes.DagNode(parent) assert isinstance(parent, pm.nodetypes.Transform) weight_alias_list = self.get_weight_alias_list() parent_list = self.get_parent_list() weight_alias = None for i in range(len(parent_list)): if (parent_list[i] == parent): weight_alias = weight_alias_list[i] break return weight_alias
finds weightAlias of given parent if it couldn't find any it returns None
anima/env/mayaEnv/picker.py
get_weight_alias
MehmetErer/anima
101
python
def get_weight_alias(self, parent): "finds weightAlias of given parent\n\n if it couldn't find any it returns None\n " if (not self._is_setup): return parent = pm.nodetypes.DagNode(parent) assert isinstance(parent, pm.nodetypes.Transform) weight_alias_list = self.get_weight_alias_list() parent_list = self.get_parent_list() weight_alias = None for i in range(len(parent_list)): if (parent_list[i] == parent): weight_alias = weight_alias_list[i] break return weight_alias
def get_weight_alias(self, parent): "finds weightAlias of given parent\n\n if it couldn't find any it returns None\n " if (not self._is_setup): return parent = pm.nodetypes.DagNode(parent) assert isinstance(parent, pm.nodetypes.Transform) weight_alias_list = self.get_weight_alias_list() parent_list = self.get_parent_list() weight_alias = None for i in range(len(parent_list)): if (parent_list[i] == parent): weight_alias = weight_alias_list[i] break return weight_alias<|docstring|>finds weightAlias of given parent if it couldn't find any it returns None<|endoftext|>
5b5c7d6f99bc3e8fbd1bd5dd5c2ad28236ab3e98573f785c921321b2c261a846
def add_new_parent(self, parent): 'adds a new parent\n ' if (not self._is_setup): return parent = pm.nodetypes.DagNode(parent) if (self.get_weight_alias(parent) is not None): return if self.check_cycle(parent): pm.PopupError('Cycle Warning!!!\nnode is one of the special objects') return pm.parentConstraint(parent, self._constrained_parent, w=0, mo=True) weight_alias = self.get_weight_alias(parent) weight_alias.setKey(t=0, v=0, ott='step') self.add_parent_to_dag_menu(parent)
adds a new parent
anima/env/mayaEnv/picker.py
add_new_parent
MehmetErer/anima
101
python
def add_new_parent(self, parent): '\n ' if (not self._is_setup): return parent = pm.nodetypes.DagNode(parent) if (self.get_weight_alias(parent) is not None): return if self.check_cycle(parent): pm.PopupError('Cycle Warning!!!\nnode is one of the special objects') return pm.parentConstraint(parent, self._constrained_parent, w=0, mo=True) weight_alias = self.get_weight_alias(parent) weight_alias.setKey(t=0, v=0, ott='step') self.add_parent_to_dag_menu(parent)
def add_new_parent(self, parent): '\n ' if (not self._is_setup): return parent = pm.nodetypes.DagNode(parent) if (self.get_weight_alias(parent) is not None): return if self.check_cycle(parent): pm.PopupError('Cycle Warning!!!\nnode is one of the special objects') return pm.parentConstraint(parent, self._constrained_parent, w=0, mo=True) weight_alias = self.get_weight_alias(parent) weight_alias.setKey(t=0, v=0, ott='step') self.add_parent_to_dag_menu(parent)<|docstring|>adds a new parent<|endoftext|>
164fd1791911f764fd5a53b0c69428e34ad9d25014679292b132c9d686dda0d7
def add_parent_to_dag_menu(self, parent): 'adds the given parent to the DAG menu\n\n oyParSw - switch to --> %PARENTNAME%\n ' command_label = ('oyObjectPicker - switch to --> ' + parent.name()) parent_index = self.get_parent_index(parent) if (parent_index == (- 1)): return command_string = (('{\n int $parentIndex = ' + str(parent_index)) + ';\n string $parentConstraint[] = `listConnections ("%s.pickedData.parentConstraint")`;\n string $parents[] = `parentConstraint -q -tl $parentConstraint[0]`;\n string $parentName = $parents[ $parentIndex ];\n python("import oyObjectPicker as oyOP; oyOP.set_objects_parent( \'%s\', \'"+$parentName+"\')");\n }')
adds the given parent to the DAG menu oyParSw - switch to --> %PARENTNAME%
anima/env/mayaEnv/picker.py
add_parent_to_dag_menu
MehmetErer/anima
101
python
def add_parent_to_dag_menu(self, parent): 'adds the given parent to the DAG menu\n\n oyParSw - switch to --> %PARENTNAME%\n ' command_label = ('oyObjectPicker - switch to --> ' + parent.name()) parent_index = self.get_parent_index(parent) if (parent_index == (- 1)): return command_string = (('{\n int $parentIndex = ' + str(parent_index)) + ';\n string $parentConstraint[] = `listConnections ("%s.pickedData.parentConstraint")`;\n string $parents[] = `parentConstraint -q -tl $parentConstraint[0]`;\n string $parentName = $parents[ $parentIndex ];\n python("import oyObjectPicker as oyOP; oyOP.set_objects_parent( \'%s\', \'"+$parentName+"\')");\n }')
def add_parent_to_dag_menu(self, parent): 'adds the given parent to the DAG menu\n\n oyParSw - switch to --> %PARENTNAME%\n ' command_label = ('oyObjectPicker - switch to --> ' + parent.name()) parent_index = self.get_parent_index(parent) if (parent_index == (- 1)): return command_string = (('{\n int $parentIndex = ' + str(parent_index)) + ';\n string $parentConstraint[] = `listConnections ("%s.pickedData.parentConstraint")`;\n string $parents[] = `parentConstraint -q -tl $parentConstraint[0]`;\n string $parentName = $parents[ $parentIndex ];\n python("import oyObjectPicker as oyOP; oyOP.set_objects_parent( \'%s\', \'"+$parentName+"\')");\n }')<|docstring|>adds the given parent to the DAG menu oyParSw - switch to --> %PARENTNAME%<|endoftext|>
bf65885a0cfd3b985f86306ac46ec2250a9859273a7d174b5800bebd0869a606
def add_default_options_to_dag_menu(self): 'adds the default menu options to the DAG menu\n\n oyObjectPicker --> fix jump\n oyObjectPicker --> edit keyframes\n ' pm.mel.source('oyAddDAGMenuCommands') command_label = 'oyObjectPicker --> release object' command_string = 'python("import oyObjectPicker as oyOP; oyOP.relaseObjectWithName(\'%s\')");' pm.mel.oyADMC_addSpecialCommandsToObject(self._object.name(), command_label, command_string) command_label = 'oyObjectPicker --> edit_keyframes' command_string = 'python("import oyObjectPicker as oyOP; oyOP.edit_keyframes_of_object(\'%s\')");' pm.mel.oyADMC_addSpecialCommandsToObject(self._object.name(), command_label, command_string) command_label = 'oyObjectPicker --> fix jump' command_string = 'python("import oyObjectPicker as oyOP; oyOP.fix_jump_on_object(\'%s\')");' pm.mel.oyADMC_addSpecialCommandsToObject(self._object.name(), command_label, command_string)
adds the default menu options to the DAG menu oyObjectPicker --> fix jump oyObjectPicker --> edit keyframes
anima/env/mayaEnv/picker.py
add_default_options_to_dag_menu
MehmetErer/anima
101
python
def add_default_options_to_dag_menu(self): 'adds the default menu options to the DAG menu\n\n oyObjectPicker --> fix jump\n oyObjectPicker --> edit keyframes\n ' pm.mel.source('oyAddDAGMenuCommands') command_label = 'oyObjectPicker --> release object' command_string = 'python("import oyObjectPicker as oyOP; oyOP.relaseObjectWithName(\'%s\')");' pm.mel.oyADMC_addSpecialCommandsToObject(self._object.name(), command_label, command_string) command_label = 'oyObjectPicker --> edit_keyframes' command_string = 'python("import oyObjectPicker as oyOP; oyOP.edit_keyframes_of_object(\'%s\')");' pm.mel.oyADMC_addSpecialCommandsToObject(self._object.name(), command_label, command_string) command_label = 'oyObjectPicker --> fix jump' command_string = 'python("import oyObjectPicker as oyOP; oyOP.fix_jump_on_object(\'%s\')");' pm.mel.oyADMC_addSpecialCommandsToObject(self._object.name(), command_label, command_string)
def add_default_options_to_dag_menu(self): 'adds the default menu options to the DAG menu\n\n oyObjectPicker --> fix jump\n oyObjectPicker --> edit keyframes\n ' pm.mel.source('oyAddDAGMenuCommands') command_label = 'oyObjectPicker --> release object' command_string = 'python("import oyObjectPicker as oyOP; oyOP.relaseObjectWithName(\'%s\')");' pm.mel.oyADMC_addSpecialCommandsToObject(self._object.name(), command_label, command_string) command_label = 'oyObjectPicker --> edit_keyframes' command_string = 'python("import oyObjectPicker as oyOP; oyOP.edit_keyframes_of_object(\'%s\')");' pm.mel.oyADMC_addSpecialCommandsToObject(self._object.name(), command_label, command_string) command_label = 'oyObjectPicker --> fix jump' command_string = 'python("import oyObjectPicker as oyOP; oyOP.fix_jump_on_object(\'%s\')");' pm.mel.oyADMC_addSpecialCommandsToObject(self._object.name(), command_label, command_string)<|docstring|>adds the default menu options to the DAG menu oyObjectPicker --> fix jump oyObjectPicker --> edit keyframes<|endoftext|>
b5cd50978eed966b9d7198e1af862c15dab477753a294aed98b5cf2cc5752801
def get_parent_index(self, parent): 'returns the given parents index\n ' parent = pm.nodetypes.DagNode(parent) parents = self.get_parent_list() for i in range(0, len(parents)): if (parents[i] == parent.name()): return i return (- 1)
returns the given parents index
anima/env/mayaEnv/picker.py
get_parent_index
MehmetErer/anima
101
python
def get_parent_index(self, parent): '\n ' parent = pm.nodetypes.DagNode(parent) parents = self.get_parent_list() for i in range(0, len(parents)): if (parents[i] == parent.name()): return i return (- 1)
def get_parent_index(self, parent): '\n ' parent = pm.nodetypes.DagNode(parent) parents = self.get_parent_list() for i in range(0, len(parents)): if (parents[i] == parent.name()): return i return (- 1)<|docstring|>returns the given parents index<|endoftext|>
f07f97310bbc82504dca7fd5770aa57311e5ed77c9f9f0833c7b00ae5605a8be
def get_parent_name_at_index(self, index): 'returns the parent name at the index\n\n the index is used in the parent list of the parent constraint\n ' parents = self.get_parent_list() return parents[index]
returns the parent name at the index. The index is used in the parent list of the parent constraint.
anima/env/mayaEnv/picker.py
get_parent_name_at_index
MehmetErer/anima
101
python
def get_parent_name_at_index(self, index): 'returns the parent name at the index\n\n the index is used in the parent list of the parent constraint\n ' parents = self.get_parent_list() return parents[index]
def get_parent_name_at_index(self, index): 'returns the parent name at the index\n\n the index is used in the parent list of the parent constraint\n ' parents = self.get_parent_list() return parents[index]<|docstring|>returns the parent name at the index the index is used in the parent list of the parent constraint<|endoftext|>
308220fedd0333d9f93ddfddbc7a11b8b2334c94ea10bf0f37b99f988d4728a9
def check_cycle(self, node): 'checks if the given parent is a child of the self._object\n or if it is setup before to be the pickedObject and self._object\n is a parent for it\n ' if self.is_special_object(node): return True node = pm.nodetypes.DagNode(node) node_as_picked_object = PickedObject(node) parent_list = node_as_picked_object.get_parent_list() if (parent_list is None): return False elif (len(parent_list) != 0): if (self._object in parent_list): return True for p in parent_list: if self.is_special_object(p): return True else: return False
checks if the given parent is a child of self._object, or if it was previously set up as a pickedObject that already has self._object among its parents
anima/env/mayaEnv/picker.py
check_cycle
MehmetErer/anima
101
python
def check_cycle(self, node): 'checks if the given parent is a child of the self._object\n or if it is setup before to be the pickedObject and self._object\n is a parent for it\n ' if self.is_special_object(node): return True node = pm.nodetypes.DagNode(node) node_as_picked_object = PickedObject(node) parent_list = node_as_picked_object.get_parent_list() if (parent_list is None): return False elif (len(parent_list) != 0): if (self._object in parent_list): return True for p in parent_list: if self.is_special_object(p): return True else: return False
def check_cycle(self, node): 'checks if the given parent is a child of the self._object\n or if it is setup before to be the pickedObject and self._object\n is a parent for it\n ' if self.is_special_object(node): return True node = pm.nodetypes.DagNode(node) node_as_picked_object = PickedObject(node) parent_list = node_as_picked_object.get_parent_list() if (parent_list is None): return False elif (len(parent_list) != 0): if (self._object in parent_list): return True for p in parent_list: if self.is_special_object(p): return True else: return False<|docstring|>checks if the given parent is a child of the self._object or if it is setup before to be the pickedObject and self._object is a parent for it<|endoftext|>
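A minimal usage sketch may help illustrate the intent of check_cycle: it is meant to run before a candidate parent is wired into the constraint network, so that a node which already picks up self._object (directly or through its own picker setup) is rejected. The import path and node names below are assumptions for illustration only, and the snippet is only meaningful inside a Maya session with pymel available.

import pymel.core as pm
from anima.env.mayaEnv import picker  # assumed import path for the module above

hand_ctrl = pm.PyNode('hand_ctrl')  # hypothetical control that will pick things up
prop_loc = pm.PyNode('prop_loc')    # hypothetical candidate parent

picked = picker.PickedObject(hand_ctrl)
if picked.check_cycle(prop_loc):
    # prop_loc is either a special node of this setup or already parented to hand_ctrl
    print('adding prop_loc as a parent would create a cycle; skipping')
else:
    print('prop_loc is safe to add as a parent')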
affb0ebcf905cfd45ba7e64370ed8d14ea24d19b2e6fdc6ef8604d2a6e51b21b
def is_special_object(self, node): 'checks if node is one of the special object:\n\n constrainedParent\n stabilizerParent\n localParent\n ' node = pm.nodetypes.DagNode(node) if ((node == self._object) or (node == self._constrained_parent) or (node == self._local_parent) or (node == self._stabilizer_parent)): return True else: return False
checks if node is one of the special objects of this setup: the picked object itself, constrainedParent, stabilizerParent or localParent
anima/env/mayaEnv/picker.py
is_special_object
MehmetErer/anima
101
python
def is_special_object(self, node): 'checks if node is one of the special object:\n\n constrainedParent\n stabilizerParent\n localParent\n ' node = pm.nodetypes.DagNode(node) if ((node == self._object) or (node == self._constrained_parent) or (node == self._local_parent) or (node == self._stabilizer_parent)): return True else: return False
def is_special_object(self, node): 'checks if node is one of the special object:\n\n constrainedParent\n stabilizerParent\n localParent\n ' node = pm.nodetypes.DagNode(node) if ((node == self._object) or (node == self._constrained_parent) or (node == self._local_parent) or (node == self._stabilizer_parent)): return True else: return False<|docstring|>checks if node is one of the special object: constrainedParent stabilizerParent localParent<|endoftext|>
4ecc3c240ad1d80df6f08b0523c345dc7e7479a98e642efe88e7fc8511f4cfe2
def set_active_parent(self, parent): 'sets specified parent as the active parent\n ' if (not self._is_setup): return parent = pm.nodetypes.DagNode(parent) active_parent = self.get_active_parent() if (parent == active_parent): return parent_weight_alias = self.get_weight_alias(parent) if (parent_weight_alias is None): return self.set_dg_dirty() matrix = self.get_stabilizer_matrix() self.set_parent_weight(parent) self.set_dg_dirty() self.set_stabilizer_matrix(matrix) self.set_stabilizer_keyframe(pm.currentTime(q=True))
sets specified parent as the active parent
anima/env/mayaEnv/picker.py
set_active_parent
MehmetErer/anima
101
python
def set_active_parent(self, parent): '\n ' if (not self._is_setup): return parent = pm.nodetypes.DagNode(parent) active_parent = self.get_active_parent() if (parent == active_parent): return parent_weight_alias = self.get_weight_alias(parent) if (parent_weight_alias is None): return self.set_dg_dirty() matrix = self.get_stabilizer_matrix() self.set_parent_weight(parent) self.set_dg_dirty() self.set_stabilizer_matrix(matrix) self.set_stabilizer_keyframe(pm.currentTime(q=True))
def set_active_parent(self, parent): '\n ' if (not self._is_setup): return parent = pm.nodetypes.DagNode(parent) active_parent = self.get_active_parent() if (parent == active_parent): return parent_weight_alias = self.get_weight_alias(parent) if (parent_weight_alias is None): return self.set_dg_dirty() matrix = self.get_stabilizer_matrix() self.set_parent_weight(parent) self.set_dg_dirty() self.set_stabilizer_matrix(matrix) self.set_stabilizer_keyframe(pm.currentTime(q=True))<|docstring|>sets specified parent as the active parent<|endoftext|>
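The method above is essentially a pop-free space switch: it caches the stabilizer's world matrix, re-keys the constraint weights with stepped tangents, then restores the matrix and keys the stabilizer on the current frame. Below is a hedged sketch of how an animator might drive it; the frame numbers, node names and import path are made up, and it assumes the picker setup already exists on the control.

import pymel.core as pm
from anima.env.mayaEnv import picker  # assumed import path for the module above

picked = picker.PickedObject(pm.PyNode('prop_ctrl'))  # hypothetical, already set up

pm.currentTime(120)                    # frame where the hand grabs the prop
picked.set_active_parent('hand_ctrl')  # switch spaces without a visible pop

pm.currentTime(180)        # frame where the prop is let go
picked.release_object()    # hand control back to the local parent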
c950dc539caf06a3f1ea86a0a1ee7d1aea8efd28d7c31f20fffe70d149ecfcaf
def set_dg_dirty(self): 'sets the DG to dirty for parentConstraint, constrainedParent and\n stabilizerParent\n ' pm.dgdirty(self._parent_constraint, self._constrained_parent, self._stabilizer_parent)
sets the DG to dirty for parentConstraint, constrainedParent and stabilizerParent
anima/env/mayaEnv/picker.py
set_dg_dirty
MehmetErer/anima
101
python
def set_dg_dirty(self): 'sets the DG to dirty for parentConstraint, constrainedParent and\n stabilizerParent\n ' pm.dgdirty(self._parent_constraint, self._constrained_parent, self._stabilizer_parent)
def set_dg_dirty(self): 'sets the DG to dirty for parentConstraint, constrainedParent and\n stabilizerParent\n ' pm.dgdirty(self._parent_constraint, self._constrained_parent, self._stabilizer_parent)<|docstring|>sets the DG to dirty for parentConstraint, constrainedParent and stabilizerParent<|endoftext|>
f263d62b986f512752309a1ba51edb72b39f5f1bbc450d74228040b94103a539
def set_parent_weight(self, parent): 'sets the weight of the parent to 1 and the others to 0\n ' parent = pm.nodetypes.DagNode(parent) parent_weight_alias = self.get_weight_alias(parent) weight_alias_list = self.get_weight_alias_list() for weightAlias in weight_alias_list: if (weightAlias == parent_weight_alias): weightAlias.setKey(v=1, ott='step') else: current_weight = weightAlias.get() if (current_weight > 0): weightAlias.setKey(v=0, ott='step')
sets the weight of the parent to 1 and the others to 0
anima/env/mayaEnv/picker.py
set_parent_weight
MehmetErer/anima
101
python
def set_parent_weight(self, parent): '\n ' parent = pm.nodetypes.DagNode(parent) parent_weight_alias = self.get_weight_alias(parent) weight_alias_list = self.get_weight_alias_list() for weightAlias in weight_alias_list: if (weightAlias == parent_weight_alias): weightAlias.setKey(v=1, ott='step') else: current_weight = weightAlias.get() if (current_weight > 0): weightAlias.setKey(v=0, ott='step')
def set_parent_weight(self, parent): '\n ' parent = pm.nodetypes.DagNode(parent) parent_weight_alias = self.get_weight_alias(parent) weight_alias_list = self.get_weight_alias_list() for weightAlias in weight_alias_list: if (weightAlias == parent_weight_alias): weightAlias.setKey(v=1, ott='step') else: current_weight = weightAlias.get() if (current_weight > 0): weightAlias.setKey(v=0, ott='step')<|docstring|>sets the weight of the parent to 1 and the others to 0<|endoftext|>
cf804a1d4e4587e7fa2c2c58f08bd3de902717a76e8aea5fbf8501bac7399119
def release_object(self): 'release the object\n ' if (not self._is_setup): return self.set_active_parent(self._local_parent)
release the object
anima/env/mayaEnv/picker.py
release_object
MehmetErer/anima
101
python
def release_object(self): '\n ' if (not self._is_setup): return self.set_active_parent(self._local_parent)
def release_object(self): '\n ' if (not self._is_setup): return self.set_active_parent(self._local_parent)<|docstring|>release the object<|endoftext|>
bda41cf1eb5c9d98611badb1b7d1a64b06018f2903398c3034b5a2fcb00e6e8b
def fix_jump(self): 'fixes the jump in current frame\n ' if (not self._is_setup): return parent = self.get_active_parent() self.delete_current_parent_key() self.set_active_parent(parent)
fixes the jump in current frame
anima/env/mayaEnv/picker.py
fix_jump
MehmetErer/anima
101
python
def fix_jump(self): '\n ' if (not self._is_setup): return parent = self.get_active_parent() self.delete_current_parent_key() self.set_active_parent(parent)
def fix_jump(self): '\n ' if (not self._is_setup): return parent = self.get_active_parent() self.delete_current_parent_key() self.set_active_parent(parent)<|docstring|>fixes the jump in current frame<|endoftext|>
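A short, hypothetical sketch of how fix_jump would typically be used: scrub to the frame where the object pops, then re-key the currently active parent in place. The node name and frame number are placeholders, and a completed picker setup inside a Maya session is assumed.

import pymel.core as pm
from anima.env.mayaEnv import picker  # assumed import path for the module above

pm.currentTime(1042)  # hypothetical frame where the object visibly jumps
picker.PickedObject(pm.PyNode('prop_ctrl')).fix_jump()  # delete the bad key, re-apply the active parent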
dcf4b5961a34b7a8db74efa19cb177ab3382ac36a8d5b3e0e89ab7437627b357
def get_stabilizer_matrix(self): 'returns stabilizer matrix\n ' return pm.xform(self._stabilizer_parent, q=True, ws=True, m=True)
returns stabilizer matrix
anima/env/mayaEnv/picker.py
get_stabilizer_matrix
MehmetErer/anima
101
python
def get_stabilizer_matrix(self): '\n ' return pm.xform(self._stabilizer_parent, q=True, ws=True, m=True)
def get_stabilizer_matrix(self): '\n ' return pm.xform(self._stabilizer_parent, q=True, ws=True, m=True)<|docstring|>returns stabilizer matrix<|endoftext|>
5fb528d53849a5d8de1c8ef33183b2a652243d45f5ebccc243489f0f5a1656db
def set_stabilizer_matrix(self, matrix): 'sets stabilizer matrix to matrix\n ' pm.xform(self._stabilizer_parent, ws=True, m=matrix)
sets stabilizer matrix to matrix
anima/env/mayaEnv/picker.py
set_stabilizer_matrix
MehmetErer/anima
101
python
def set_stabilizer_matrix(self, matrix): '\n ' pm.xform(self._stabilizer_parent, ws=True, m=matrix)
def set_stabilizer_matrix(self, matrix): '\n ' pm.xform(self._stabilizer_parent, ws=True, m=matrix)<|docstring|>sets stabilizer matrix to matrix<|endoftext|>
41179b06279671a0ca134230fd795dc4e132efec2d09feb9965ad48edd9a7bbd
def select_anim_curves(self): 'selects animCurves of parentConstraint and stabilizerParent nodes for\n keyframe editing\n ' if (not self._is_setup): return pm.select(auxiliary.get_anim_curves(self._parent_constraint), auxiliary.get_anim_curves(self._stabilizer_parent))
selects animCurves of parentConstraint and stabilizerParent nodes for keyframe editing
anima/env/mayaEnv/picker.py
select_anim_curves
MehmetErer/anima
101
python
def select_anim_curves(self): 'selects animCurves of parentConstraint and stabilizerParent nodes for\n keyframe editing\n ' if (not self._is_setup): return pm.select(auxiliary.get_anim_curves(self._parent_constraint), auxiliary.get_anim_curves(self._stabilizer_parent))
def select_anim_curves(self): 'selects animCurves of parentConstraint and stabilizerParent nodes for\n keyframe editing\n ' if (not self._is_setup): return pm.select(auxiliary.get_anim_curves(self._parent_constraint), auxiliary.get_anim_curves(self._stabilizer_parent))<|docstring|>selects animCurves of parentConstraint and stabilizerParent nodes for keyframe editing<|endoftext|>
c178ff2285e3661c872027710b89f559eaace98b33518694c147f973df460156
def set_keyframe_colors(self): 'sets the keyframe colors for the parentConstraint and stabilizerParent\n ' anim_curves = (auxiliary.get_anim_curves(self._parent_constraint) + auxiliary.get_anim_curves(self._stabilizer_parent)) color = [0, 1, 0] for animCurve in anim_curves: auxiliary.set_anim_curve_color(animCurve, color)
sets the keyframe colors for the parentConstraint and stabilizerParent
anima/env/mayaEnv/picker.py
set_keyframe_colors
MehmetErer/anima
101
python
def set_keyframe_colors(self): '\n ' anim_curves = (auxiliary.get_anim_curves(self._parent_constraint) + auxiliary.get_anim_curves(self._stabilizer_parent)) color = [0, 1, 0] for animCurve in anim_curves: auxiliary.set_anim_curve_color(animCurve, color)
def set_keyframe_colors(self): '\n ' anim_curves = (auxiliary.get_anim_curves(self._parent_constraint) + auxiliary.get_anim_curves(self._stabilizer_parent)) color = [0, 1, 0] for animCurve in anim_curves: auxiliary.set_anim_curve_color(animCurve, color)<|docstring|>sets the keyframe colors for the parentConstraint and stabilizerParent<|endoftext|>
ae18d0dc3032afd93d29f3c3a9392820472b5dc683b80a4ec3b8f41e8ed3e891
def delete_parent_key(self, frame): 'deletes parent keyframes at the given keyframe\n ' if (not self._is_setup): return pm.cutKey(self._parent_constraint, self._stabilizer_parent, cl=True, time=(frame, frame))
deletes parent keyframes at the given keyframe
anima/env/mayaEnv/picker.py
delete_parent_key
MehmetErer/anima
101
python
def delete_parent_key(self, frame): '\n ' if (not self._is_setup): return pm.cutKey(self._parent_constraint, self._stabilizer_parent, cl=True, time=(frame, frame))
def delete_parent_key(self, frame): '\n ' if (not self._is_setup): return pm.cutKey(self._parent_constraint, self._stabilizer_parent, cl=True, time=(frame, frame))<|docstring|>deletes parent keyframes at the given keyframe<|endoftext|>
ae001c740492ea7ce2a060b062402c65ba443a83f0dd51667dac62eb41c8291c
def delete_current_parent_key(self): 'deletes parent keyframes at the current keyframe\n ' current_frame = pm.currentTime(q=True) self.delete_parent_key(current_frame)
deletes parent keyframes at the current keyframe
anima/env/mayaEnv/picker.py
delete_current_parent_key
MehmetErer/anima
101
python
def delete_current_parent_key(self): '\n ' current_frame = pm.currentTime(q=True) self.delete_parent_key(current_frame)
def delete_current_parent_key(self): '\n ' current_frame = pm.currentTime(q=True) self.delete_parent_key(current_frame)<|docstring|>deletes parent keyframes at the current keyframe<|endoftext|>
c36f65dbfccc9c13d915e88ca69e8e03fbd174896f3b27b6fdd2c2295f2deb26
def move_imgs(row): 'Move files according to the label\n !!!! uses global variables\n Args:\n row (pandas row): row of the pandas dataframe\n\n ' shutil.move(src=os.path.join(dir_all_images, f"{row['image_id']}.jpg"), dst=os.path.join(row['dset_dir'], row['dx'], f"{row['image_id']}.jpg")) return None
Move files according to the label !!!! uses global variables Args: row (pandas row): row of the pandas dataframe
others_data_organization.py
move_imgs
raplima/tl-thin-sections
0
python
def move_imgs(row): 'Move files according to the label\n !!!! uses global variables\n Args:\n row (pandas row): row of the pandas dataframe\n\n ' shutil.move(src=os.path.join(dir_all_images, f"{row['image_id']}.jpg"), dst=os.path.join(row['dset_dir'], row['dx'], f"{row['image_id']}.jpg")) return None
def move_imgs(row): 'Move files according to the label\n !!!! uses global variables\n Args:\n row (pandas row): row of the pandas dataframe\n\n ' shutil.move(src=os.path.join(dir_all_images, f"{row['image_id']}.jpg"), dst=os.path.join(row['dset_dir'], row['dx'], f"{row['image_id']}.jpg")) return None<|docstring|>Move files according to the label !!!! uses global variables Args: row (pandas row): row of the pandas dataframe<|endoftext|>
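Because move_imgs reads the module-level global dir_all_images and expects image_id, dx and dset_dir columns, it is presumably driven with DataFrame.apply. The sketch below is an assumption about that driver (made-up paths, labels and image ids), with move_imgs from the file above assumed to be in scope; the source .jpg files must already exist for the move to succeed.

import os
import pandas as pd

dir_all_images = 'data/all_images'  # global consumed by move_imgs (assumed location)

df = pd.DataFrame({
    'image_id': ['ISIC_0001', 'ISIC_0002'],
    'dx': ['nv', 'mel'],
    'dset_dir': ['data/train', 'data/val'],
})

# make sure <dset_dir>/<dx> exists before moving anything into it
for _, row in df.iterrows():
    os.makedirs(os.path.join(row['dset_dir'], row['dx']), exist_ok=True)

df.apply(move_imgs, axis=1)  # moves data/all_images/<image_id>.jpg into <dset_dir>/<dx>/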
525c968f82f784159583db0d97f06d154177ef9a2b50410f7755c7fb1437c966
@responses.activate def test_create_with_single_use_token(self): '\n Ensure we can create a custom payment when provided with a\n single_use_token.\n (Token representing a new payment card.)\n ' self.client.force_authenticate(user=self.admin) responses.add(responses.POST, 'http://example.com/cardpayments/v1/accounts/0123456789/auths/', json=SAMPLE_PAYMENT_RESPONSE, status=200) data = {'single_use_token': 'SChsxyprFn176yhD', 'price': '123.00', 'name': 'name of the payment', 'details': 'Description of the payment', 'user': reverse('user-detail', args=[self.user.id])} response = self.client.post(reverse('custompayment-list'), data, format='json') response_data = json.loads(response.content) del response_data['id'] del response_data['url'] content = {'authorization_id': '1', 'details': 'Description of the payment', 'name': 'name of the payment', 'price': '123.00', 'settlement_id': '1', 'transaction_date': response_data['transaction_date'], 'reference_number': '751', 'user': ('http://testserver/users/' + str(self.user.id))} self.assertEqual(response_data, content) self.assertEqual(len(mail.outbox), 1) self.assertEqual(response.status_code, status.HTTP_201_CREATED)
Ensure we can create a custom payment when provided with a single_use_token. (Token representing a new payment card.)
store/tests/tests_viewset_CustomPayment.py
test_create_with_single_use_token
RignonNoel/Blitz-API
3
python
@responses.activate def test_create_with_single_use_token(self): '\n Ensure we can create a custom payment when provided with a\n single_use_token.\n (Token representing a new payment card.)\n ' self.client.force_authenticate(user=self.admin) responses.add(responses.POST, 'http://example.com/cardpayments/v1/accounts/0123456789/auths/', json=SAMPLE_PAYMENT_RESPONSE, status=200) data = {'single_use_token': 'SChsxyprFn176yhD', 'price': '123.00', 'name': 'name of the payment', 'details': 'Description of the payment', 'user': reverse('user-detail', args=[self.user.id])} response = self.client.post(reverse('custompayment-list'), data, format='json') response_data = json.loads(response.content) del response_data['id'] del response_data['url'] content = {'authorization_id': '1', 'details': 'Description of the payment', 'name': 'name of the payment', 'price': '123.00', 'settlement_id': '1', 'transaction_date': response_data['transaction_date'], 'reference_number': '751', 'user': ('http://testserver/users/' + str(self.user.id))} self.assertEqual(response_data, content) self.assertEqual(len(mail.outbox), 1) self.assertEqual(response.status_code, status.HTTP_201_CREATED)
@responses.activate def test_create_with_single_use_token(self): '\n Ensure we can create a custom payment when provided with a\n single_use_token.\n (Token representing a new payment card.)\n ' self.client.force_authenticate(user=self.admin) responses.add(responses.POST, 'http://example.com/cardpayments/v1/accounts/0123456789/auths/', json=SAMPLE_PAYMENT_RESPONSE, status=200) data = {'single_use_token': 'SChsxyprFn176yhD', 'price': '123.00', 'name': 'name of the payment', 'details': 'Description of the payment', 'user': reverse('user-detail', args=[self.user.id])} response = self.client.post(reverse('custompayment-list'), data, format='json') response_data = json.loads(response.content) del response_data['id'] del response_data['url'] content = {'authorization_id': '1', 'details': 'Description of the payment', 'name': 'name of the payment', 'price': '123.00', 'settlement_id': '1', 'transaction_date': response_data['transaction_date'], 'reference_number': '751', 'user': ('http://testserver/users/' + str(self.user.id))} self.assertEqual(response_data, content) self.assertEqual(len(mail.outbox), 1) self.assertEqual(response.status_code, status.HTTP_201_CREATED)<|docstring|>Ensure we can create a custom payment when provided with a single_use_token. (Token representing a new payment card.)<|endoftext|>
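These tests never hit a real payment gateway: the responses library intercepts the outgoing POST to the card-payments endpoint and returns a canned JSON body. The standalone sketch below shows only that mocking pattern; the URL matches the one used in the test, while the JSON payload and token are simplified placeholders rather than the project's real SAMPLE_PAYMENT_RESPONSE fixture.

import requests
import responses

@responses.activate
def demo_mocked_payment_call():
    responses.add(
        responses.POST,
        'http://example.com/cardpayments/v1/accounts/0123456789/auths/',
        json={'id': '1', 'settlements': [{'id': '1'}]},  # simplified stand-in payload
        status=200,
    )
    # any requests.post to that URL now gets the canned response instead of the network
    resp = requests.post(
        'http://example.com/cardpayments/v1/accounts/0123456789/auths/',
        json={'card': {'paymentToken': 'SChsxyprFn176yhD'}},
    )
    assert resp.status_code == 200
    assert resp.json()['id'] == '1'

demo_mocked_payment_call()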
ae89c679fc80f613797f2738352fde3ae346f16dbaaba68a5e7c9ca601c88914
@responses.activate def test_create_with_invalid_single_use_token(self): "\n Ensure we can't create a custom payment when provided with a bad\n single_use_token.\n (Token representing a new payment card.)\n " self.client.force_authenticate(user=self.admin) responses.add(responses.POST, 'http://example.com/cardpayments/v1/accounts/0123456789/auths/', json=SAMPLE_INVALID_PAYMENT_TOKEN, status=400) data = {'single_use_token': 'invalid', 'price': '123.00', 'name': 'name of the payment', 'details': 'Description of the payment', 'user': reverse('user-detail', args=[self.user.id])} response = self.client.post(reverse('custompayment-list'), data, format='json') content = {'non_field_errors': ['An error occured while processing the payment: invalid payment token or payment profile/card inactive.']} self.assertEqual(json.loads(response.content).get('non_field_errors'), content.get('non_field_errors')) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
Ensure we can't create a custom payment when provided with a bad single_use_token. (Token representing a new payment card.)
store/tests/tests_viewset_CustomPayment.py
test_create_with_invalid_single_use_token
RignonNoel/Blitz-API
3
python
@responses.activate def test_create_with_invalid_single_use_token(self): "\n Ensure we can't create a custom payment when provided with a bad\n single_use_token.\n (Token representing a new payment card.)\n " self.client.force_authenticate(user=self.admin) responses.add(responses.POST, 'http://example.com/cardpayments/v1/accounts/0123456789/auths/', json=SAMPLE_INVALID_PAYMENT_TOKEN, status=400) data = {'single_use_token': 'invalid', 'price': '123.00', 'name': 'name of the payment', 'details': 'Description of the payment', 'user': reverse('user-detail', args=[self.user.id])} response = self.client.post(reverse('custompayment-list'), data, format='json') content = {'non_field_errors': ['An error occured while processing the payment: invalid payment token or payment profile/card inactive.']} self.assertEqual(json.loads(response.content).get('non_field_errors'), content.get('non_field_errors')) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
@responses.activate def test_create_with_invalid_single_use_token(self): "\n Ensure we can't create a custom payment when provided with a bad\n single_use_token.\n (Token representing a new payment card.)\n " self.client.force_authenticate(user=self.admin) responses.add(responses.POST, 'http://example.com/cardpayments/v1/accounts/0123456789/auths/', json=SAMPLE_INVALID_PAYMENT_TOKEN, status=400) data = {'single_use_token': 'invalid', 'price': '123.00', 'name': 'name of the payment', 'details': 'Description of the payment', 'user': reverse('user-detail', args=[self.user.id])} response = self.client.post(reverse('custompayment-list'), data, format='json') content = {'non_field_errors': ['An error occured while processing the payment: invalid payment token or payment profile/card inactive.']} self.assertEqual(json.loads(response.content).get('non_field_errors'), content.get('non_field_errors')) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)<|docstring|>Ensure we can't create a custom payment when provided with a bad single_use_token. (Token representing a new payment card.)<|endoftext|>
9ebfc193e3e89975b0d1c2a090d82c7a6d3a8dfa713d0e50d8f6e9222b2784e5
@responses.activate def test_create_without_permission(self): '\n Ensure we can create a custom payment when provided with a\n single_use_token.\n (Token representing a new payment card.)\n ' self.client.force_authenticate(user=self.user) data = {'single_use_token': 'SChsxyprFn176yhD', 'price': '123.00', 'name': 'name of the payment', 'details': 'Description of the payment', 'user': reverse('user-detail', args=[self.user.id])} response = self.client.post(reverse('custompayment-list'), data, format='json') response_data = json.loads(response.content) content = {'detail': 'You do not have permission to perform this action.'} self.assertEqual(response_data, content) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
Ensure we can't create a custom payment as a regular (non-admin) user, even when provided with a single_use_token. (Token representing a new payment card.)
store/tests/tests_viewset_CustomPayment.py
test_create_without_permission
RignonNoel/Blitz-API
3
python
@responses.activate def test_create_without_permission(self): '\n Ensure we can create a custom payment when provided with a\n single_use_token.\n (Token representing a new payment card.)\n ' self.client.force_authenticate(user=self.user) data = {'single_use_token': 'SChsxyprFn176yhD', 'price': '123.00', 'name': 'name of the payment', 'details': 'Description of the payment', 'user': reverse('user-detail', args=[self.user.id])} response = self.client.post(reverse('custompayment-list'), data, format='json') response_data = json.loads(response.content) content = {'detail': 'You do not have permission to perform this action.'} self.assertEqual(response_data, content) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
@responses.activate def test_create_without_permission(self): '\n Ensure we can create a custom payment when provided with a\n single_use_token.\n (Token representing a new payment card.)\n ' self.client.force_authenticate(user=self.user) data = {'single_use_token': 'SChsxyprFn176yhD', 'price': '123.00', 'name': 'name of the payment', 'details': 'Description of the payment', 'user': reverse('user-detail', args=[self.user.id])} response = self.client.post(reverse('custompayment-list'), data, format='json') response_data = json.loads(response.content) content = {'detail': 'You do not have permission to perform this action.'} self.assertEqual(response_data, content) self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)<|docstring|>Ensure we can create a custom payment when provided with a single_use_token. (Token representing a new payment card.)<|endoftext|>
165e8dd6c62e397820b9f0cdf2be8c8bbf34b51383294ca05c90faa747e55523
@responses.activate def test_create_payment_issue(self): "\n Ensure we can't create a custom payment when the payment proccessing\n fails.\n " self.client.force_authenticate(user=self.admin) responses.add(responses.POST, 'http://example.com/cardpayments/v1/accounts/0123456789/auths/', json=SAMPLE_CARD_REFUSED, status=400) data = {'single_use_token': 'invalid', 'price': '123.00', 'name': 'name of the payment', 'details': 'Description of the payment', 'user': reverse('user-detail', args=[self.user.id])} response = self.client.post(reverse('custompayment-list'), data, format='json') content = content = {'non_field_errors': ['An error occured while processing the payment: the request has been declined by the issuing bank.']} self.assertEqual(json.loads(response.content).get('non_field_errors'), content.get('non_field_errors')) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
Ensure we can't create a custom payment when the payment processing fails.
store/tests/tests_viewset_CustomPayment.py
test_create_payment_issue
RignonNoel/Blitz-API
3
python
@responses.activate def test_create_payment_issue(self): "\n Ensure we can't create a custom payment when the payment proccessing\n fails.\n " self.client.force_authenticate(user=self.admin) responses.add(responses.POST, 'http://example.com/cardpayments/v1/accounts/0123456789/auths/', json=SAMPLE_CARD_REFUSED, status=400) data = {'single_use_token': 'invalid', 'price': '123.00', 'name': 'name of the payment', 'details': 'Description of the payment', 'user': reverse('user-detail', args=[self.user.id])} response = self.client.post(reverse('custompayment-list'), data, format='json') content = content = {'non_field_errors': ['An error occured while processing the payment: the request has been declined by the issuing bank.']} self.assertEqual(json.loads(response.content).get('non_field_errors'), content.get('non_field_errors')) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
@responses.activate def test_create_payment_issue(self): "\n Ensure we can't create a custom payment when the payment proccessing\n fails.\n " self.client.force_authenticate(user=self.admin) responses.add(responses.POST, 'http://example.com/cardpayments/v1/accounts/0123456789/auths/', json=SAMPLE_CARD_REFUSED, status=400) data = {'single_use_token': 'invalid', 'price': '123.00', 'name': 'name of the payment', 'details': 'Description of the payment', 'user': reverse('user-detail', args=[self.user.id])} response = self.client.post(reverse('custompayment-list'), data, format='json') content = content = {'non_field_errors': ['An error occured while processing the payment: the request has been declined by the issuing bank.']} self.assertEqual(json.loads(response.content).get('non_field_errors'), content.get('non_field_errors')) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)<|docstring|>Ensure we can't create a custom payment when the payment proccessing fails.<|endoftext|>
5e308435d23d485ee3d2d2400e79ead07a28196c77c96127f735d9cf632bb86d
def test_create_missing_field(self): "\n Ensure we can't create a custom payment when required field are\n missing.\n " self.client.force_authenticate(user=self.admin) data = {} response = self.client.post(reverse('custompayment-list'), data, format='json') content = {'user': ['This field is required.'], 'price': ['This field is required.'], 'name': ['This field is required.'], 'single_use_token': ['This field is required.']} self.assertEqual(json.loads(response.content), content) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
Ensure we can't create a custom payment when required fields are missing.
store/tests/tests_viewset_CustomPayment.py
test_create_missing_field
RignonNoel/Blitz-API
3
python
def test_create_missing_field(self): "\n Ensure we can't create a custom payment when required field are\n missing.\n " self.client.force_authenticate(user=self.admin) data = {} response = self.client.post(reverse('custompayment-list'), data, format='json') content = {'user': ['This field is required.'], 'price': ['This field is required.'], 'name': ['This field is required.'], 'single_use_token': ['This field is required.']} self.assertEqual(json.loads(response.content), content) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_create_missing_field(self): "\n Ensure we can't create a custom payment when required field are\n missing.\n " self.client.force_authenticate(user=self.admin) data = {} response = self.client.post(reverse('custompayment-list'), data, format='json') content = {'user': ['This field is required.'], 'price': ['This field is required.'], 'name': ['This field is required.'], 'single_use_token': ['This field is required.']} self.assertEqual(json.loads(response.content), content) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)<|docstring|>Ensure we can't create a custom payment when required field are missing.<|endoftext|>
199b70bae5962046eea6907b2811b40a846cbbb4d3bf2eff5a9c42c6a87f85b3
def test_create_null_field(self): "\n Ensure we can't create a cutom payment when required field are null.\n " self.client.force_authenticate(user=self.admin) data = {'user': None, 'name': None, 'details': None, 'price': None} response = self.client.post(reverse('custompayment-list'), data, format='json') content = {'user': ['This field may not be null.'], 'name': ['This field may not be null.'], 'price': ['This field may not be null.'], 'single_use_token': ['This field is required.']} self.assertEqual(json.loads(response.content), content) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
Ensure we can't create a custom payment when required fields are null.
store/tests/tests_viewset_CustomPayment.py
test_create_null_field
RignonNoel/Blitz-API
3
python
def test_create_null_field(self): "\n \n " self.client.force_authenticate(user=self.admin) data = {'user': None, 'name': None, 'details': None, 'price': None} response = self.client.post(reverse('custompayment-list'), data, format='json') content = {'user': ['This field may not be null.'], 'name': ['This field may not be null.'], 'price': ['This field may not be null.'], 'single_use_token': ['This field is required.']} self.assertEqual(json.loads(response.content), content) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_create_null_field(self): "\n \n " self.client.force_authenticate(user=self.admin) data = {'user': None, 'name': None, 'details': None, 'price': None} response = self.client.post(reverse('custompayment-list'), data, format='json') content = {'user': ['This field may not be null.'], 'name': ['This field may not be null.'], 'price': ['This field may not be null.'], 'single_use_token': ['This field is required.']} self.assertEqual(json.loads(response.content), content) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)<|docstring|>Ensure we can't create a cutom payment when required field are null.<|endoftext|>
5b09789ce7703bbbc07544a653c261f72f2865b76667caf2df80f46949ba2068
def test_create_invalid_field(self): "\n Ensure we can't create a custom payment when required field are\n invalid.\n " self.client.force_authenticate(user=self.admin) data = {'user': 'invalid', 'name': (1,), 'details': (1,), 'price': 'invalid', 'single_use_token': (1,)} response = self.client.post(reverse('custompayment-list'), data, format='json') content = {'details': ['Not a valid string.'], 'name': ['Not a valid string.'], 'price': ['A valid number is required.'], 'single_use_token': ['Not a valid string.'], 'user': ['Invalid hyperlink - No URL match.']} self.assertEqual(json.loads(response.content), content) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
Ensure we can't create a custom payment when required fields are invalid.
store/tests/tests_viewset_CustomPayment.py
test_create_invalid_field
RignonNoel/Blitz-API
3
python
def test_create_invalid_field(self): "\n Ensure we can't create a custom payment when required field are\n invalid.\n " self.client.force_authenticate(user=self.admin) data = {'user': 'invalid', 'name': (1,), 'details': (1,), 'price': 'invalid', 'single_use_token': (1,)} response = self.client.post(reverse('custompayment-list'), data, format='json') content = {'details': ['Not a valid string.'], 'name': ['Not a valid string.'], 'price': ['A valid number is required.'], 'single_use_token': ['Not a valid string.'], 'user': ['Invalid hyperlink - No URL match.']} self.assertEqual(json.loads(response.content), content) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_create_invalid_field(self): "\n Ensure we can't create a custom payment when required field are\n invalid.\n " self.client.force_authenticate(user=self.admin) data = {'user': 'invalid', 'name': (1,), 'details': (1,), 'price': 'invalid', 'single_use_token': (1,)} response = self.client.post(reverse('custompayment-list'), data, format='json') content = {'details': ['Not a valid string.'], 'name': ['Not a valid string.'], 'price': ['A valid number is required.'], 'single_use_token': ['Not a valid string.'], 'user': ['Invalid hyperlink - No URL match.']} self.assertEqual(json.loads(response.content), content) self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)<|docstring|>Ensure we can't create a custom payment when required field are invalid.<|endoftext|>
ae77d27e8192e218be4603eaaa37da49f1da4f0d1a2127c62cfd7907fbe5f4cf
def test_delete(self): '\n Ensure we can delete a custom payment.\n ' self.client.force_authenticate(user=self.admin) response = self.client.delete(reverse('custompayment-detail', kwargs={'pk': 1})) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
Ensure we can delete a custom payment.
store/tests/tests_viewset_CustomPayment.py
test_delete
RignonNoel/Blitz-API
3
python
def test_delete(self): '\n \n ' self.client.force_authenticate(user=self.admin) response = self.client.delete(reverse('custompayment-detail', kwargs={'pk': 1})) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
def test_delete(self): '\n \n ' self.client.force_authenticate(user=self.admin) response = self.client.delete(reverse('custompayment-detail', kwargs={'pk': 1})) self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)<|docstring|>Ensure we can delete a custom payment.<|endoftext|>
298729c1d1c6a33e7ed64b42789eda96d792bf86d4b63236f4d9fe0613c6ca5b
def test_update(self): "\n Ensure we can't update a custom payment.\n " self.client.force_authenticate(user=self.admin) data = {'name': 'new name'} response = self.client.patch(reverse('custompayment-detail', kwargs={'pk': 1}), data, format='json') self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
Ensure we can't update a custom payment.
store/tests/tests_viewset_CustomPayment.py
test_update
RignonNoel/Blitz-API
3
python
def test_update(self): "\n \n " self.client.force_authenticate(user=self.admin) data = {'name': 'new name'} response = self.client.patch(reverse('custompayment-detail', kwargs={'pk': 1}), data, format='json') self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)
def test_update(self): "\n \n " self.client.force_authenticate(user=self.admin) data = {'name': 'new name'} response = self.client.patch(reverse('custompayment-detail', kwargs={'pk': 1}), data, format='json') self.assertEqual(response.status_code, status.HTTP_405_METHOD_NOT_ALLOWED)<|docstring|>Ensure we can't update a custom payment.<|endoftext|>
dfc350aed095b5a1134840114f4d24f362ffde717c40b6ccef745e6b13feef38
def test_list(self): "\n Ensure we can't list cutom payments as an unauthenticated user.\n " response = self.client.get(reverse('custompayment-list'), format='json') data = json.loads(response.content) content = {'detail': 'Authentication credentials were not provided.'} self.assertEqual(data, content) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
Ensure we can't list custom payments as an unauthenticated user.
store/tests/tests_viewset_CustomPayment.py
test_list
RignonNoel/Blitz-API
3
python
def test_list(self): "\n \n " response = self.client.get(reverse('custompayment-list'), format='json') data = json.loads(response.content) content = {'detail': 'Authentication credentials were not provided.'} self.assertEqual(data, content) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_list(self): "\n \n " response = self.client.get(reverse('custompayment-list'), format='json') data = json.loads(response.content) content = {'detail': 'Authentication credentials were not provided.'} self.assertEqual(data, content) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)<|docstring|>Ensure we can't list cutom payments as an unauthenticated user.<|endoftext|>
de0c2dcfaadcb03be7cff1793c3e951552a86cab6f48cbb9416a069aa9a95dca
def test_list_owner(self): '\n Ensure we can list owned custom payments as an authenticated user.\n ' self.client.force_authenticate(user=self.user) response = self.client.get(reverse('custompayment-list'), format='json') data = json.loads(response.content) content = {'count': 1, 'next': None, 'previous': None, 'results': [{'id': 1, 'transaction_date': data['results'][0]['transaction_date'], 'authorization_id': '1', 'settlement_id': '1', 'reference_number': '751', 'price': '123.00', 'name': 'test payment', 'details': 'Description of the test payment', 'url': 'http://testserver/custom_payments/1', 'user': ('http://testserver/users/' + str(self.user.id))}]} self.assertEqual(data, content) self.assertEqual(response.status_code, status.HTTP_200_OK)
Ensure we can list owned custom payments as an authenticated user.
store/tests/tests_viewset_CustomPayment.py
test_list_owner
RignonNoel/Blitz-API
3
python
def test_list_owner(self): '\n \n ' self.client.force_authenticate(user=self.user) response = self.client.get(reverse('custompayment-list'), format='json') data = json.loads(response.content) content = {'count': 1, 'next': None, 'previous': None, 'results': [{'id': 1, 'transaction_date': data['results'][0]['transaction_date'], 'authorization_id': '1', 'settlement_id': '1', 'reference_number': '751', 'price': '123.00', 'name': 'test payment', 'details': 'Description of the test payment', 'url': 'http://testserver/custom_payments/1', 'user': ('http://testserver/users/' + str(self.user.id))}]} self.assertEqual(data, content) self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_list_owner(self): '\n \n ' self.client.force_authenticate(user=self.user) response = self.client.get(reverse('custompayment-list'), format='json') data = json.loads(response.content) content = {'count': 1, 'next': None, 'previous': None, 'results': [{'id': 1, 'transaction_date': data['results'][0]['transaction_date'], 'authorization_id': '1', 'settlement_id': '1', 'reference_number': '751', 'price': '123.00', 'name': 'test payment', 'details': 'Description of the test payment', 'url': 'http://testserver/custom_payments/1', 'user': ('http://testserver/users/' + str(self.user.id))}]} self.assertEqual(data, content) self.assertEqual(response.status_code, status.HTTP_200_OK)<|docstring|>Ensure we can list owned custom payments as an authenticated user.<|endoftext|>
f7b140e51a52869fd128442994ca3dbbb3a5a4679a73568ad5df67002fff2816
def test_list_admin(self): '\n Ensure we can list all custom payments as an admin.\n ' self.client.force_authenticate(user=self.admin) response = self.client.get(reverse('custompayment-list'), format='json') data = json.loads(response.content) content = {'count': 2, 'next': None, 'previous': None, 'results': [{'id': 1, 'transaction_date': data['results'][0]['transaction_date'], 'authorization_id': '1', 'settlement_id': '1', 'reference_number': '751', 'price': '123.00', 'name': 'test payment', 'details': 'Description of the test payment', 'url': 'http://testserver/custom_payments/1', 'user': ('http://testserver/users/' + str(self.user.id))}, {'id': 2, 'transaction_date': data['results'][1]['transaction_date'], 'authorization_id': '1', 'reference_number': '751', 'settlement_id': '1', 'price': '123.00', 'name': 'admin payment', 'details': 'Description of the admin payment', 'url': 'http://testserver/custom_payments/2', 'user': ('http://testserver/users/' + str(self.admin.id))}]} self.assertEqual(data, content) self.assertEqual(response.status_code, status.HTTP_200_OK)
Ensure we can list all custom payments as an admin.
store/tests/tests_viewset_CustomPayment.py
test_list_admin
RignonNoel/Blitz-API
3
python
def test_list_admin(self): '\n \n ' self.client.force_authenticate(user=self.admin) response = self.client.get(reverse('custompayment-list'), format='json') data = json.loads(response.content) content = {'count': 2, 'next': None, 'previous': None, 'results': [{'id': 1, 'transaction_date': data['results'][0]['transaction_date'], 'authorization_id': '1', 'settlement_id': '1', 'reference_number': '751', 'price': '123.00', 'name': 'test payment', 'details': 'Description of the test payment', 'url': 'http://testserver/custom_payments/1', 'user': ('http://testserver/users/' + str(self.user.id))}, {'id': 2, 'transaction_date': data['results'][1]['transaction_date'], 'authorization_id': '1', 'reference_number': '751', 'settlement_id': '1', 'price': '123.00', 'name': 'admin payment', 'details': 'Description of the admin payment', 'url': 'http://testserver/custom_payments/2', 'user': ('http://testserver/users/' + str(self.admin.id))}]} self.assertEqual(data, content) self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_list_admin(self): '\n \n ' self.client.force_authenticate(user=self.admin) response = self.client.get(reverse('custompayment-list'), format='json') data = json.loads(response.content) content = {'count': 2, 'next': None, 'previous': None, 'results': [{'id': 1, 'transaction_date': data['results'][0]['transaction_date'], 'authorization_id': '1', 'settlement_id': '1', 'reference_number': '751', 'price': '123.00', 'name': 'test payment', 'details': 'Description of the test payment', 'url': 'http://testserver/custom_payments/1', 'user': ('http://testserver/users/' + str(self.user.id))}, {'id': 2, 'transaction_date': data['results'][1]['transaction_date'], 'authorization_id': '1', 'reference_number': '751', 'settlement_id': '1', 'price': '123.00', 'name': 'admin payment', 'details': 'Description of the admin payment', 'url': 'http://testserver/custom_payments/2', 'user': ('http://testserver/users/' + str(self.admin.id))}]} self.assertEqual(data, content) self.assertEqual(response.status_code, status.HTTP_200_OK)<|docstring|>Ensure we can list all custom payments as an admin.<|endoftext|>
5b259e1075c368bfcd18a26ba4c500e67fc096ea9e2ea6623add4e21e047fbef
def test_read(self): "\n Ensure we can't read a custom payment as an unauthenticated user.\n " response = self.client.get(reverse('custompayment-detail', kwargs={'pk': 1})) content = {'detail': 'Authentication credentials were not provided.'} self.assertEqual(json.loads(response.content), content) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
Ensure we can't read a custom payment as an unauthenticated user.
store/tests/tests_viewset_CustomPayment.py
test_read
RignonNoel/Blitz-API
3
python
def test_read(self): "\n \n " response = self.client.get(reverse('custompayment-detail', kwargs={'pk': 1})) content = {'detail': 'Authentication credentials were not provided.'} self.assertEqual(json.loads(response.content), content) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_read(self): "\n \n " response = self.client.get(reverse('custompayment-detail', kwargs={'pk': 1})) content = {'detail': 'Authentication credentials were not provided.'} self.assertEqual(json.loads(response.content), content) self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)<|docstring|>Ensure we can't read a custom payment as an unauthenticated user.<|endoftext|>
291a314105e27e89f7bde6153674b82f91650418af95533c9cd128e756e1cb10
def test_read_owner(self): '\n Ensure we can read a custom payment owned by an authenticated user.\n ' self.client.force_authenticate(user=self.user) response = self.client.get(reverse('custompayment-detail', kwargs={'pk': 1})) data = json.loads(response.content) content = {'id': 1, 'transaction_date': data['transaction_date'], 'authorization_id': '1', 'settlement_id': '1', 'reference_number': '751', 'price': '123.00', 'name': 'test payment', 'details': 'Description of the test payment', 'url': 'http://testserver/custom_payments/1', 'user': ('http://testserver/users/' + str(self.user.id))} self.assertEqual(json.loads(response.content), content) self.assertEqual(response.status_code, status.HTTP_200_OK)
Ensure we can read a custom payment owned by an authenticated user.
store/tests/tests_viewset_CustomPayment.py
test_read_owner
RignonNoel/Blitz-API
3
python
def test_read_owner(self): '\n \n ' self.client.force_authenticate(user=self.user) response = self.client.get(reverse('custompayment-detail', kwargs={'pk': 1})) data = json.loads(response.content) content = {'id': 1, 'transaction_date': data['transaction_date'], 'authorization_id': '1', 'settlement_id': '1', 'reference_number': '751', 'price': '123.00', 'name': 'test payment', 'details': 'Description of the test payment', 'url': 'http://testserver/custom_payments/1', 'user': ('http://testserver/users/' + str(self.user.id))} self.assertEqual(json.loads(response.content), content) self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_read_owner(self): '\n \n ' self.client.force_authenticate(user=self.user) response = self.client.get(reverse('custompayment-detail', kwargs={'pk': 1})) data = json.loads(response.content) content = {'id': 1, 'transaction_date': data['transaction_date'], 'authorization_id': '1', 'settlement_id': '1', 'reference_number': '751', 'price': '123.00', 'name': 'test payment', 'details': 'Description of the test payment', 'url': 'http://testserver/custom_payments/1', 'user': ('http://testserver/users/' + str(self.user.id))} self.assertEqual(json.loads(response.content), content) self.assertEqual(response.status_code, status.HTTP_200_OK)<|docstring|>Ensure we can read a custom payment owned by an authenticated user.<|endoftext|>
852367f9104b9ee144f9039c65adf01f380edde500e1cec742e9d9de7cf60c40
def test_read_owner_not_owned(self): "\n Ensure we can't read a custom payment not owned by an authenticated\n user.\n " self.client.force_authenticate(user=self.user) response = self.client.get(reverse('custompayment-detail', kwargs={'pk': 2})) content = {'detail': 'Not found.'} self.assertEqual(json.loads(response.content), content) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
Ensure we can't read a custom payment not owned by an authenticated user.
store/tests/tests_viewset_CustomPayment.py
test_read_owner_not_owned
RignonNoel/Blitz-API
3
python
def test_read_owner_not_owned(self): "\n Ensure we can't read a custom payment not owned by an authenticated\n user.\n " self.client.force_authenticate(user=self.user) response = self.client.get(reverse('custompayment-detail', kwargs={'pk': 2})) content = {'detail': 'Not found.'} self.assertEqual(json.loads(response.content), content) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_read_owner_not_owned(self): "\n Ensure we can't read a custom payment not owned by an authenticated\n user.\n " self.client.force_authenticate(user=self.user) response = self.client.get(reverse('custompayment-detail', kwargs={'pk': 2})) content = {'detail': 'Not found.'} self.assertEqual(json.loads(response.content), content) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)<|docstring|>Ensure we can't read a custom payment not owned by an authenticated user.<|endoftext|>
0c2625ff794e3e436c6626752d827c9e03c13399e492aa2711035c4791d5e658
def test_read_admin(self): '\n Ensure we can read any custom payment as an admin.\n ' self.client.force_authenticate(user=self.admin) response = self.client.get(reverse('custompayment-detail', kwargs={'pk': 1})) data = json.loads(response.content) content = {'id': 1, 'transaction_date': data['transaction_date'], 'authorization_id': '1', 'settlement_id': '1', 'reference_number': '751', 'price': '123.00', 'name': 'test payment', 'details': 'Description of the test payment', 'url': 'http://testserver/custom_payments/1', 'user': ('http://testserver/users/' + str(self.user.id))} self.assertEqual(json.loads(response.content), content) self.assertEqual(response.status_code, status.HTTP_200_OK)
Ensure we can read any custom payment as an admin.
store/tests/tests_viewset_CustomPayment.py
test_read_admin
RignonNoel/Blitz-API
3
python
def test_read_admin(self): '\n \n ' self.client.force_authenticate(user=self.admin) response = self.client.get(reverse('custompayment-detail', kwargs={'pk': 1})) data = json.loads(response.content) content = {'id': 1, 'transaction_date': data['transaction_date'], 'authorization_id': '1', 'settlement_id': '1', 'reference_number': '751', 'price': '123.00', 'name': 'test payment', 'details': 'Description of the test payment', 'url': 'http://testserver/custom_payments/1', 'user': ('http://testserver/users/' + str(self.user.id))} self.assertEqual(json.loads(response.content), content) self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_read_admin(self): '\n \n ' self.client.force_authenticate(user=self.admin) response = self.client.get(reverse('custompayment-detail', kwargs={'pk': 1})) data = json.loads(response.content) content = {'id': 1, 'transaction_date': data['transaction_date'], 'authorization_id': '1', 'settlement_id': '1', 'reference_number': '751', 'price': '123.00', 'name': 'test payment', 'details': 'Description of the test payment', 'url': 'http://testserver/custom_payments/1', 'user': ('http://testserver/users/' + str(self.user.id))} self.assertEqual(json.loads(response.content), content) self.assertEqual(response.status_code, status.HTTP_200_OK)<|docstring|>Ensure we can read any custom payment as an admin.<|endoftext|>
09cf15b0733fe1fee8a4f8047677ffa4272fe9592f801d56144bd2d78bd39cb9
def test_read_non_existent(self): "\n Ensure we get not found when asking for a custom payment that doesn't\n exist.\n " self.client.force_authenticate(user=self.user) response = self.client.get(reverse('custompayment-detail', kwargs={'pk': 999})) content = {'detail': 'Not found.'} self.assertEqual(json.loads(response.content), content) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
Ensure we get not found when asking for a custom payment that doesn't exist.
store/tests/tests_viewset_CustomPayment.py
test_read_non_existent
RignonNoel/Blitz-API
3
python
def test_read_non_existent(self): "\n Ensure we get not found when asking for a custom payment that doesn't\n exist.\n " self.client.force_authenticate(user=self.user) response = self.client.get(reverse('custompayment-detail', kwargs={'pk': 999})) content = {'detail': 'Not found.'} self.assertEqual(json.loads(response.content), content) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_read_non_existent(self): "\n Ensure we get not found when asking for a custom payment that doesn't\n exist.\n " self.client.force_authenticate(user=self.user) response = self.client.get(reverse('custompayment-detail', kwargs={'pk': 999})) content = {'detail': 'Not found.'} self.assertEqual(json.loads(response.content), content) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)<|docstring|>Ensure we get not found when asking for a custom payment that doesn't exist.<|endoftext|>
3c0d61db8ff25a54ed3126b558fb2a664949028699203f346072feb4cb4dadb7
def acc_thresh_multich(input, target, thresh=0.5, sigmoid=True, one_ch=None): 'Compute accuracy when `y_pred` and `y_true` are the same size.' if sigmoid: input = input.sigmoid() n = input.shape[0] if (one_ch is not None): input = input[:, one_ch, None] target = target[:, one_ch, None] input = input.view(n, (- 1)) target = target.view(n, (- 1)) return ((input > thresh) == target.byte()).float().mean()
Compute accuracy when `y_pred` and `y_true` are the same size.
Segmentation/losses.py
acc_thresh_multich
sebasmos/Building.predictor
0
python
def acc_thresh_multich(input, target, thresh=0.5, sigmoid=True, one_ch=None): if sigmoid: input = input.sigmoid() n = input.shape[0] if (one_ch is not None): input = input[:, one_ch, None] target = target[:, one_ch, None] input = input.view(n, (- 1)) target = target.view(n, (- 1)) return ((input > thresh) == target.byte()).float().mean()
def acc_thresh_multich(input, target, thresh=0.5, sigmoid=True, one_ch=None): if sigmoid: input = input.sigmoid() n = input.shape[0] if (one_ch is not None): input = input[:, one_ch, None] target = target[:, one_ch, None] input = input.view(n, (- 1)) target = target.view(n, (- 1)) return ((input > thresh) == target.byte()).float().mean()<|docstring|>Compute accuracy when `y_pred` and `y_true` are the same size.<|endoftext|>
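A small usage sketch, assuming the file above is importable as losses and that predictions arrive as raw logits of shape (batch, channels, H, W) with a binary target of the same shape; the module name and tensor shapes are illustrative assumptions, not taken from the original project.

import torch
from losses import acc_thresh_multich  # assumed module name (Segmentation/losses.py)

logits = torch.randn(4, 3, 64, 64)                     # raw model outputs, 3 mask channels
target = torch.randint(0, 2, (4, 3, 64, 64)).float()   # binary ground-truth masks

acc_all = acc_thresh_multich(logits, target)            # accuracy over all channels
acc_ch0 = acc_thresh_multich(logits, target, one_ch=0)  # accuracy of channel 0 only
print(acc_all.item(), acc_ch0.item())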
c41803e8fa9a0cad8536ff892544d7d0a22723d1a6c88ade2d6f8d6788e7ad1d
def dice_multich(input, targs, iou=False, one_ch=None): 'Dice coefficient metric for binary target. If iou=True, returns iou metric, classic for segmentation problems.' n = targs.shape[0] input = input.sigmoid() if (one_ch is not None): input = input[:, one_ch, None] targs = targs[:, one_ch, None] input = (input > 0.5).view(n, (- 1)).float() targs = targs.view(n, (- 1)).float() intersect = (input * targs).sum().float() union = (input + targs).sum().float() if (not iou): return (((2.0 * intersect) / union) if (union > 0) else union.new([1.0]).squeeze()) else: return (intersect / ((union - intersect) + 1.0))
Dice coefficient metric for binary target. If iou=True, returns iou metric, classic for segmentation problems.
Segmentation/losses.py
dice_multich
sebasmos/Building.predictor
0
python
def dice_multich(input, targs, iou=False, one_ch=None): n = targs.shape[0] input = input.sigmoid() if (one_ch is not None): input = input[:, one_ch, None] targs = targs[:, one_ch, None] input = (input > 0.5).view(n, (- 1)).float() targs = targs.view(n, (- 1)).float() intersect = (input * targs).sum().float() union = (input + targs).sum().float() if (not iou): return (((2.0 * intersect) / union) if (union > 0) else union.new([1.0]).squeeze()) else: return (intersect / ((union - intersect) + 1.0))
def dice_multich(input, targs, iou=False, one_ch=None): n = targs.shape[0] input = input.sigmoid() if (one_ch is not None): input = input[:, one_ch, None] targs = targs[:, one_ch, None] input = (input > 0.5).view(n, (- 1)).float() targs = targs.view(n, (- 1)).float() intersect = (input * targs).sum().float() union = (input + targs).sum().float() if (not iou): return (((2.0 * intersect) / union) if (union > 0) else union.new([1.0]).squeeze()) else: return (intersect / ((union - intersect) + 1.0))<|docstring|>Dice coefficient metric for binary target. If iou=True, returns iou metric, classic for segmentation problems.<|endoftext|>
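For orientation: on the same counts, the Dice branch returns 2*|A∩B| / (|A| + |B|), while the iou=True branch returns |A∩B| / (|A∪B| + 1), i.e. an IoU with +1 smoothing in the denominator. A hedged usage sketch follows, with an assumed losses module name and made-up tensor shapes.

import torch
from losses import dice_multich  # assumed module name (Segmentation/losses.py)

logits = torch.randn(2, 3, 32, 32)                   # raw model outputs
masks = torch.randint(0, 2, (2, 3, 32, 32)).float()  # binary ground-truth masks

dice = dice_multich(logits, masks)           # 2*|A∩B| / (|A| + |B|)
iou = dice_multich(logits, masks, iou=True)  # |A∩B| / (|A∪B| + 1), smoothed
print(dice.item(), iou.item())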
2e6e8eb076600d812cbe89862dfbd0e69d77b2bee62fbcccdbe059d65be5bf43
def iou_pytorch(outputs: torch.Tensor, labels: torch.Tensor): 'Fast enough iou calculation function' SMOOTH = 1e-06 outputs = outputs.squeeze(1) intersection = (outputs & labels).float().sum((1, 2)) union = (outputs | labels).float().sum((1, 2)) iou = ((intersection + SMOOTH) / (union + SMOOTH)) thresholded = (torch.clamp((20 * (iou - 0.5)), 0, 10).ceil() / 10) return thresholded.mean()
Fast enough iou calculation function
Segmentation/losses.py
iou_pytorch
sebasmos/Building.predictor
0
python
def iou_pytorch(outputs: torch.Tensor, labels: torch.Tensor): SMOOTH = 1e-06 outputs = outputs.squeeze(1) intersection = (outputs & labels).float().sum((1, 2)) union = (outputs | labels).float().sum((1, 2)) iou = ((intersection + SMOOTH) / (union + SMOOTH)) thresholded = (torch.clamp((20 * (iou - 0.5)), 0, 10).ceil() / 10) return thresholded.mean()
def iou_pytorch(outputs: torch.Tensor, labels: torch.Tensor): SMOOTH = 1e-06 outputs = outputs.squeeze(1) intersection = (outputs & labels).float().sum((1, 2)) union = (outputs | labels).float().sum((1, 2)) iou = ((intersection + SMOOTH) / (union + SMOOTH)) thresholded = (torch.clamp((20 * (iou - 0.5)), 0, 10).ceil() / 10) return thresholded.mean()<|docstring|>Fast enough iou calculation function<|endoftext|>
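The final two lines of iou_pytorch quantise the smoothed IoU into eleven buckets (0.0, 0.1, ..., 1.0): anything at or below 0.5 collapses to zero and every further 0.05 of IoU adds one decile, a convention used by some segmentation benchmarks. A standalone check of just that mapping (toy values, independent of any dataset):

import torch

iou = torch.tensor([0.30, 0.50, 0.52, 0.74, 0.99])
thresholded = torch.clamp(20 * (iou - 0.5), 0, 10).ceil() / 10
print(thresholded)   # tensor([0.0000, 0.0000, 0.1000, 0.5000, 1.0000])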
02c20d06f5e51f3926d4e93b63d1c3eab35615852f435cb9465b115a2f6488c3
def lovasz_grad(gt_sorted): '\n Computes gradient of the Lovasz extension w.r.t sorted errors\n See Alg. 1 in paper\n ' p = len(gt_sorted) gts = gt_sorted.sum() intersection = (gts - gt_sorted.float().cumsum(0)) union = (gts + (1 - gt_sorted).float().cumsum(0)) jaccard = (1 - (intersection / union)) if (p > 1): jaccard[1:p] = (jaccard[1:p] - jaccard[0:(- 1)]) return jaccard
Computes gradient of the Lovasz extension w.r.t sorted errors See Alg. 1 in paper
Segmentation/losses.py
lovasz_grad
sebasmos/Building.predictor
0
python
def lovasz_grad(gt_sorted): '\n Computes gradient of the Lovasz extension w.r.t sorted errors\n See Alg. 1 in paper\n ' p = len(gt_sorted) gts = gt_sorted.sum() intersection = (gts - gt_sorted.float().cumsum(0)) union = (gts + (1 - gt_sorted).float().cumsum(0)) jaccard = (1 - (intersection / union)) if (p > 1): jaccard[1:p] = (jaccard[1:p] - jaccard[0:(- 1)]) return jaccard
def lovasz_grad(gt_sorted): '\n Computes gradient of the Lovasz extension w.r.t sorted errors\n See Alg. 1 in paper\n ' p = len(gt_sorted) gts = gt_sorted.sum() intersection = (gts - gt_sorted.float().cumsum(0)) union = (gts + (1 - gt_sorted).float().cumsum(0)) jaccard = (1 - (intersection / union)) if (p > 1): jaccard[1:p] = (jaccard[1:p] - jaccard[0:(- 1)]) return jaccard<|docstring|>Computes gradient of the Lovasz extension w.r.t sorted errors See Alg. 1 in paper<|endoftext|>
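lovasz_grad computes the gradient of the Lovász extension of the Jaccard loss (Alg. 1 in the Lovász-Softmax paper): given ground-truth labels already sorted by decreasing error, it returns the increase in Jaccard loss contributed by each additional error, i.e. the differenced cumulative losses that later get dotted with the sorted errors. A hand-checkable toy run, assuming the lovasz_grad defined above is in scope:

import torch

gt_sorted = torch.tensor([1, 0, 1])   # labels ordered by decreasing hinge error
print(lovasz_grad(gt_sorted))         # tensor([0.5000, 0.1667, 0.3333])
# cumulative Jaccard losses are 1/2, 2/3 and 1; the gradient is their discrete difference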
b9a7e90968afc6de62fa3780186d6c7b25dd91dd064e002e06f06ebaa9a75951
def lovasz_hinge_flat(logits, labels, ignore_index): '\n Binary Lovasz hinge loss\n logits: [P] Variable, logits at each prediction (between -\\infty and +\\infty)\n labels: [P] Tensor, binary ground truth labels (0 or 1)\n ignore_index: label to ignore\n ' logits = logits.contiguous().view((- 1)) labels = labels.contiguous().view((- 1)) if (ignore_index is not None): mask = (labels != ignore_index) logits = logits[mask] labels = labels[mask] errors = hinge(logits, labels) (errors_sorted, perm) = torch.sort(errors, dim=0, descending=True) perm = perm.data gt_sorted = labels[perm] grad = lovasz_grad(gt_sorted) loss = torch.dot((F.elu(errors_sorted) + 1), grad) return loss
Binary Lovasz hinge loss logits: [P] Variable, logits at each prediction (between -\infty and +\infty) labels: [P] Tensor, binary ground truth labels (0 or 1) ignore_index: label to ignore
Segmentation/losses.py
lovasz_hinge_flat
sebasmos/Building.predictor
0
python
def lovasz_hinge_flat(logits, labels, ignore_index): '\n Binary Lovasz hinge loss\n logits: [P] Variable, logits at each prediction (between -\\infty and +\\infty)\n labels: [P] Tensor, binary ground truth labels (0 or 1)\n ignore_index: label to ignore\n ' logits = logits.contiguous().view((- 1)) labels = labels.contiguous().view((- 1)) if (ignore_index is not None): mask = (labels != ignore_index) logits = logits[mask] labels = labels[mask] errors = hinge(logits, labels) (errors_sorted, perm) = torch.sort(errors, dim=0, descending=True) perm = perm.data gt_sorted = labels[perm] grad = lovasz_grad(gt_sorted) loss = torch.dot((F.elu(errors_sorted) + 1), grad) return loss
def lovasz_hinge_flat(logits, labels, ignore_index): '\n Binary Lovasz hinge loss\n logits: [P] Variable, logits at each prediction (between -\\infty and +\\infty)\n labels: [P] Tensor, binary ground truth labels (0 or 1)\n ignore_index: label to ignore\n ' logits = logits.contiguous().view((- 1)) labels = labels.contiguous().view((- 1)) if (ignore_index is not None): mask = (labels != ignore_index) logits = logits[mask] labels = labels[mask] errors = hinge(logits, labels) (errors_sorted, perm) = torch.sort(errors, dim=0, descending=True) perm = perm.data gt_sorted = labels[perm] grad = lovasz_grad(gt_sorted) loss = torch.dot((F.elu(errors_sorted) + 1), grad) return loss<|docstring|>Binary Lovasz hinge loss logits: [P] Variable, logits at each prediction (between -\infty and +\infty) labels: [P] Tensor, binary ground truth labels (0 or 1) ignore_index: label to ignore<|endoftext|>
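lovasz_hinge_flat relies on a hinge(logits, labels) helper that is defined elsewhere in this file and not shown in the record. In the reference Lovász-hinge formulation the per-pixel error is a margin residual against ±1 signs, so a compatible definition would look like the sketch below; this is an assumption about the missing helper, not code taken from the repository:

import torch

def hinge(logits, labels):
    # map {0, 1} labels to {-1, +1} signs; larger values mean the logit sits
    # further on the wrong side of the margin
    signs = 2.0 * labels.float() - 1.0
    return 1.0 - logits * signs

The errors are left unclipped here because the caller above smooths them with F.elu(errors_sorted) + 1 before taking the dot product with the Lovász gradient.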
8f7bcc9e2ae07263172168d1f70e2b993857db10e1d46e8f1a989fe7fa6b5119
def forward(self, y_pred, y_true): '\n :param y_pred: NxCxHxW\n :param y_true: NxCxHxW\n :return: scalar\n ' per_image = self.per_image y_pred = y_pred.sigmoid() batch_size = y_pred.size()[0] eps = 1e-05 if (not per_image): batch_size = 1 dice_target = y_true.contiguous().view(batch_size, (- 1)).float() dice_output = y_pred.contiguous().view(batch_size, (- 1)) intersection = torch.sum((dice_output * dice_target), dim=1) union = ((torch.sum(dice_output, dim=1) + torch.sum(dice_target, dim=1)) + eps) loss = (1 - (((2 * intersection) + eps) / union)).mean() return loss
:param y_pred: NxCxHxW :param y_true: NxCxHxW :return: scalar
Segmentation/losses.py
forward
sebasmos/Building.predictor
0
python
def forward(self, y_pred, y_true): '\n :param y_pred: NxCxHxW\n :param y_true: NxCxHxW\n :return: scalar\n ' per_image = self.per_image y_pred = y_pred.sigmoid() batch_size = y_pred.size()[0] eps = 1e-05 if (not per_image): batch_size = 1 dice_target = y_true.contiguous().view(batch_size, (- 1)).float() dice_output = y_pred.contiguous().view(batch_size, (- 1)) intersection = torch.sum((dice_output * dice_target), dim=1) union = ((torch.sum(dice_output, dim=1) + torch.sum(dice_target, dim=1)) + eps) loss = (1 - (((2 * intersection) + eps) / union)).mean() return loss
def forward(self, y_pred, y_true): '\n :param y_pred: NxCxHxW\n :param y_true: NxCxHxW\n :return: scalar\n ' per_image = self.per_image y_pred = y_pred.sigmoid() batch_size = y_pred.size()[0] eps = 1e-05 if (not per_image): batch_size = 1 dice_target = y_true.contiguous().view(batch_size, (- 1)).float() dice_output = y_pred.contiguous().view(batch_size, (- 1)) intersection = torch.sum((dice_output * dice_target), dim=1) union = ((torch.sum(dice_output, dim=1) + torch.sum(dice_target, dim=1)) + eps) loss = (1 - (((2 * intersection) + eps) / union)).mean() return loss<|docstring|>:param y_pred: NxCxHxW :param y_true: NxCxHxW :return: scalar<|endoftext|>
9c10877f2c377db7db1d37314f7dfda7475218b74d4cda43e22beb7bffd1e54b
def forward(self, y_pred, y_true): '\n :param y_pred: NxCxHxW\n :param y_true: NxCxHxW\n :return: scalar\n ' y_pred = y_pred.sigmoid() gamma = self.gamma ignore_index = self.ignore_index outputs = y_pred.contiguous() targets = y_true.contiguous() eps = 1e-08 non_ignored = (targets.view((- 1)) != ignore_index) targets = targets.view((- 1))[non_ignored].float() outputs = outputs.contiguous().view((- 1))[non_ignored] outputs = torch.clamp(outputs, eps, (1.0 - eps)) targets = torch.clamp(targets, eps, (1.0 - eps)) pt = (((1 - targets) * (1 - outputs)) + (targets * outputs)) return ((- ((1.0 - pt) ** gamma)) * torch.log(pt)).mean()
:param y_pred: NxCxHxW :param y_true: NxCxHxW :return: scalar
Segmentation/losses.py
forward
sebasmos/Building.predictor
0
python
def forward(self, y_pred, y_true): '\n :param y_pred: NxCxHxW\n :param y_true: NxCxHxW\n :return: scalar\n ' y_pred = y_pred.sigmoid() gamma = self.gamma ignore_index = self.ignore_index outputs = y_pred.contiguous() targets = y_true.contiguous() eps = 1e-08 non_ignored = (targets.view((- 1)) != ignore_index) targets = targets.view((- 1))[non_ignored].float() outputs = outputs.contiguous().view((- 1))[non_ignored] outputs = torch.clamp(outputs, eps, (1.0 - eps)) targets = torch.clamp(targets, eps, (1.0 - eps)) pt = (((1 - targets) * (1 - outputs)) + (targets * outputs)) return ((- ((1.0 - pt) ** gamma)) * torch.log(pt)).mean()
def forward(self, y_pred, y_true): '\n :param y_pred: NxCxHxW\n :param y_true: NxCxHxW\n :return: scalar\n ' y_pred = y_pred.sigmoid() gamma = self.gamma ignore_index = self.ignore_index outputs = y_pred.contiguous() targets = y_true.contiguous() eps = 1e-08 non_ignored = (targets.view((- 1)) != ignore_index) targets = targets.view((- 1))[non_ignored].float() outputs = outputs.contiguous().view((- 1))[non_ignored] outputs = torch.clamp(outputs, eps, (1.0 - eps)) targets = torch.clamp(targets, eps, (1.0 - eps)) pt = (((1 - targets) * (1 - outputs)) + (targets * outputs)) return ((- ((1.0 - pt) ** gamma)) * torch.log(pt)).mean()<|docstring|>:param y_pred: NxCxHxW :param y_true: NxCxHxW :return: scalar<|endoftext|>
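The forward above is the binary focal loss FL(pt) = -(1 - pt)^gamma * log(pt), where pt is the probability the model assigns to the true class; the (1 - pt)^gamma factor suppresses the contribution of easy, well-classified pixels relative to plain cross-entropy. A quick standalone check of that down-weighting (gamma and the probabilities are illustrative values, not taken from the repository):

import torch

gamma = 2.0
pt = torch.tensor([0.9, 0.6, 0.1])            # probability of the true class
focal = -((1.0 - pt) ** gamma) * torch.log(pt)
ce = -torch.log(pt)                           # plain cross-entropy for comparison
print(focal)   # approx. tensor([0.0011, 0.0817, 1.8651])
print(ce)      # approx. tensor([0.1054, 0.5108, 2.3026])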
e42181da0425ea410870ded4a2ae867ecbaa27b2dd04a20581ed3d92249043b8
def get_indices_by_groups(dataset): '\n Only use this to see F1-scores for how well we can recover the subgroups\n ' indices = [] for g in range(len(dataset.group_labels)): indices.append(np.where((dataset.targets_all['group_idx'] == g))[0]) return indices
Only use this to see F1-scores for how well we can recover the subgroups
groups.py
get_indices_by_groups
mzio/SupContrast
0
python
def get_indices_by_groups(dataset): '\n \n ' indices = [] for g in range(len(dataset.group_labels)): indices.append(np.where((dataset.targets_all['group_idx'] == g))[0]) return indices
def get_indices_by_groups(dataset): '\n \n ' indices = [] for g in range(len(dataset.group_labels)): indices.append(np.where((dataset.targets_all['group_idx'] == g))[0]) return indices<|docstring|>Only use this to see F1-scores for how well we can recover the subgroups<|endoftext|>
016fca59254694ec084605ab5960630dd7d2eece033f720edf111edb0f9b1d9d
def compute_group_labels(embeddings, all_indices, dataloader, cluster_method='kmeans', n_clusters=2, save_name=None, ix=None, save=True, norm_cost_matrix=True, save_dir='./group_predictions', verbose=False, seed=42): '\n Compute group labels given embeddings\n - Will also report precision, recall, f1-score for the groups (assuming best-assignment)\n - Note this requires referencing the true group labels, and then mapping arbitrary group indices generated by the clustering to an assignment that best matches these true group labels\n - But GDRO sees any group index assignment as the same. \n - There is no advantage with mapping the arbitrary group indices here\n ' all_cluster_labels = [] all_indices_by_pred_groups = [] ref_indices_by_class = get_indices_by_class(dataloader.dataset) ref_indices_by_group = get_indices_by_groups(dataloader.dataset) for (cix, class_indices) in enumerate(ref_indices_by_class): (cluster_labels, cluster_correct) = compute_clusters(embeddings[class_indices], cluster_method, n_clusters, indices=all_indices[class_indices], dataloader=dataloader, seed=seed) for c in np.unique(cluster_labels): all_indices_by_pred_groups.append(class_indices[np.where((cluster_labels == c))[0]]) pred_group_labels = np.zeros(len(all_indices)) for (g, indices) in enumerate(all_indices_by_pred_groups): for i in indices: pred_group_labels[i] = g if save: save_path = join(save_dir, f'pred_groups_{save_name}.npy') with open(save_path, 'wb') as f: np.save(f, pred_group_labels) print(f'Saved group predictions to {save_path}') return (pred_group_labels, None) cost_matrix = np.zeros((len(all_indices_by_pred_groups), len(ref_indices_by_group))) cost_matrix_normed = np.zeros((len(all_indices_by_pred_groups), len(ref_indices_by_group))) for (pix, pred_group_indices) in enumerate(all_indices_by_pred_groups): for (gix, group_indices) in enumerate(ref_indices_by_group): intersection_counts = np.intersect1d(pred_group_indices, group_indices).shape[0] cost = ((- 1) * intersection_counts) cost_normed = (1 - (intersection_counts / len(group_indices))) output = f'{pix} {gix} {intersection_counts:4d} {len(group_indices):4d} {(intersection_counts / len(group_indices)):<.3f}' if verbose: print(output) cost_matrix[pix][gix] = cost cost_matrix_normed[pix][gix] = cost_normed if verbose: print('') if norm_cost_matrix: cost_matrix = cost_matrix_normed (row_ind, col_ind) = linear_sum_assignment(cost_matrix) print(f'Hungarian assignment: {col_ind}') dataset = dataloader.dataset pred_group_labels = np.zeros(len(dataloader.dataset.targets_all['target'])) for (pix, pred_group_indices) in enumerate(all_indices_by_pred_groups): pred_group_labels[pred_group_indices] = col_ind[pix] acc = ((pred_group_labels == dataset.targets_all['group_idx']).sum() / len(pred_group_labels)) print(f'Acc: {(acc * 100):<.3f}%') print('Precision, Recall, F1-Score (%)') print('- Average by:') micro_prf = precision_recall_fscore_support(dataset.targets_all['group_idx'], pred_group_labels, average='micro') prf = ' '.join([f'{(m * 100):<.3f}' for m in micro_prf[:3]]) print(f' - micro: {prf}') macro_prf = precision_recall_fscore_support(dataset.targets_all['group_idx'], pred_group_labels, average='macro') prf = ' '.join([f'{(m * 100):<.3f}' for m in macro_prf[:3]]) print(f' - macro: {prf}') weighted_prf = precision_recall_fscore_support(dataset.targets_all['group_idx'], pred_group_labels, average='weighted') prf = ' '.join([f'{(m * 100):<.3f}' for m in weighted_prf[:3]]) print(f' - weighted: {prf}') if save: save_path = join(save_dir, f'pred_groups_{save_name}.npy') with open(save_path, 'wb') as f: np.save(f, pred_group_labels) print(f'Saved group predictions to {save_path}') return (pred_group_labels, (micro_prf, macro_prf, weighted_prf))
Compute group labels given embeddings - Will also report precision, recall, f1-score for the groups (assuming best-assignment) - Note this requires referencing the true group labels, and then mapping arbitrary group indices generated by the clustering to an assignment that best matches these true group labels - But GDRO sees any group index assignment as the same. - There is no advantage with mapping the arbitrary group indices here
groups.py
compute_group_labels
mzio/SupContrast
0
python
def compute_group_labels(embeddings, all_indices, dataloader, cluster_method='kmeans', n_clusters=2, save_name=None, ix=None, save=True, norm_cost_matrix=True, save_dir='./group_predictions', verbose=False, seed=42): '\n Compute group labels given embeddings\n - Will also report precision, recall, f1-score for the groups (assuming best-assignment)\n - Note this requires referencing the true group labels, and then mapping arbitrary group indices generated by the clustering to an assignment that best matches these true group labels\n - But GDRO sees any group index assignment as the same. \n - There is no advantage with mapping the arbitrary group indices here\n ' all_cluster_labels = [] all_indices_by_pred_groups = [] ref_indices_by_class = get_indices_by_class(dataloader.dataset) ref_indices_by_group = get_indices_by_groups(dataloader.dataset) for (cix, class_indices) in enumerate(ref_indices_by_class): (cluster_labels, cluster_correct) = compute_clusters(embeddings[class_indices], cluster_method, n_clusters, indices=all_indices[class_indices], dataloader=dataloader, seed=seed) for c in np.unique(cluster_labels): all_indices_by_pred_groups.append(class_indices[np.where((cluster_labels == c))[0]]) pred_group_labels = np.zeros(len(all_indices)) for (g, indices) in enumerate(all_indices_by_pred_groups): for i in indices: pred_group_labels[i] = g if save: save_path = join(save_dir, f'pred_groups_{save_name}.npy') with open(save_path, 'wb') as f: np.save(f, pred_group_labels) print(f'Saved group predictions to {save_path}') return (pred_group_labels, None) cost_matrix = np.zeros((len(all_indices_by_pred_groups), len(ref_indices_by_group))) cost_matrix_normed = np.zeros((len(all_indices_by_pred_groups), len(ref_indices_by_group))) for (pix, pred_group_indices) in enumerate(all_indices_by_pred_groups): for (gix, group_indices) in enumerate(ref_indices_by_group): intersection_counts = np.intersect1d(pred_group_indices, group_indices).shape[0] cost = ((- 1) * intersection_counts) cost_normed = (1 - (intersection_counts / len(group_indices))) output = f'{pix} {gix} {intersection_counts:4d} {len(group_indices):4d} {(intersection_counts / len(group_indices)):<.3f}' if verbose: print(output) cost_matrix[pix][gix] = cost cost_matrix_normed[pix][gix] = cost_normed if verbose: print() if norm_cost_matrix: cost_matrix = cost_matrix_normed (row_ind, col_ind) = linear_sum_assignment(cost_matrix) print(f'Hungarian assignment: {col_ind}') dataset = dataloader.dataset pred_group_labels = np.zeros(len(dataloader.dataset.targets_all['target'])) for (pix, pred_group_indices) in enumerate(all_indices_by_pred_groups): pred_group_labels[pred_group_indices] = col_ind[pix] acc = ((pred_group_labels == dataset.targets_all['group_idx']).sum() / len(pred_group_labels)) print(f'Acc: {(acc * 100):<.3f}%') print('Precision, Recall, F1-Score (%)') print('- Average by:') micro_prf = precision_recall_fscore_support(dataset.targets_all['group_idx'], pred_group_labels, average='micro') prf = ' '.join([f'{(m * 100):<.3f}' for m in micro_prf[:3]]) print(f' - micro: {prf}') macro_prf = precision_recall_fscore_support(dataset.targets_all['group_idx'], pred_group_labels, average='macro') prf = ' '.join([f'{(m * 100):<.3f}' for m in macro_prf[:3]]) print(f' - macro: {prf}') weighted_prf = precision_recall_fscore_support(dataset.targets_all['group_idx'], pred_group_labels, average='weighted') prf = ' '.join([f'{(m * 100):<.3f}' for m in weighted_prf[:3]]) print(f' - weighted: {prf}') if save: save_path = join(save_dir, f'pred_groups_{save_name}.npy') with open(save_path, 'wb') as f: np.save(f, pred_group_labels) print(f'Saved group predictions to {save_path}') return (pred_group_labels, (micro_prf, macro_prf, weighted_prf))
def compute_group_labels(embeddings, all_indices, dataloader, cluster_method='kmeans', n_clusters=2, save_name=None, ix=None, save=True, norm_cost_matrix=True, save_dir='./group_predictions', verbose=False, seed=42): '\n Compute group labels given embeddings\n - Will also report precision, recall, f1-score for the groups (assuming best-assignment)\n - Note this requires referencing the true group labels, and then mapping arbitrary group indices generated by the clustering to an assignment that best matches these true group labels\n - But GDRO sees any group index assignment as the same. \n - There is no advantage with mapping the arbitrary group indices here\n ' all_cluster_labels = [] all_indices_by_pred_groups = [] ref_indices_by_class = get_indices_by_class(dataloader.dataset) ref_indices_by_group = get_indices_by_groups(dataloader.dataset) for (cix, class_indices) in enumerate(ref_indices_by_class): (cluster_labels, cluster_correct) = compute_clusters(embeddings[class_indices], cluster_method, n_clusters, indices=all_indices[class_indices], dataloader=dataloader, seed=seed) for c in np.unique(cluster_labels): all_indices_by_pred_groups.append(class_indices[np.where((cluster_labels == c))[0]]) pred_group_labels = np.zeros(len(all_indices)) for (g, indices) in enumerate(all_indices_by_pred_groups): for i in indices: pred_group_labels[i] = g if save: save_path = join(save_dir, f'pred_groups_{save_name}.npy') with open(save_path, 'wb') as f: np.save(f, pred_group_labels) print(f'Saved group predictions to {save_path}') return (pred_group_labels, None) cost_matrix = np.zeros((len(all_indices_by_pred_groups), len(ref_indices_by_group))) cost_matrix_normed = np.zeros((len(all_indices_by_pred_groups), len(ref_indices_by_group))) for (pix, pred_group_indices) in enumerate(all_indices_by_pred_groups): for (gix, group_indices) in enumerate(ref_indices_by_group): intersection_counts = np.intersect1d(pred_group_indices, group_indices).shape[0] cost = ((- 1) * intersection_counts) cost_normed = (1 - (intersection_counts / len(group_indices))) output = f'{pix} {gix} {intersection_counts:4d} {len(group_indices):4d} {(intersection_counts / len(group_indices)):<.3f}' if verbose: print(output) cost_matrix[pix][gix] = cost cost_matrix_normed[pix][gix] = cost_normed if verbose: print() if norm_cost_matrix: cost_matrix = cost_matrix_normed (row_ind, col_ind) = linear_sum_assignment(cost_matrix) print(f'Hungarian assignment: {col_ind}') dataset = dataloader.dataset pred_group_labels = np.zeros(len(dataloader.dataset.targets_all['target'])) for (pix, pred_group_indices) in enumerate(all_indices_by_pred_groups): pred_group_labels[pred_group_indices] = col_ind[pix] acc = ((pred_group_labels == dataset.targets_all['group_idx']).sum() / len(pred_group_labels)) print(f'Acc: {(acc * 100):<.3f}%') print('Precision, Recall, F1-Score (%)') print('- Average by:') micro_prf = precision_recall_fscore_support(dataset.targets_all['group_idx'], pred_group_labels, average='micro') prf = ' '.join([f'{(m * 100):<.3f}' for m in micro_prf[:3]]) print(f' - micro: {prf}') macro_prf = precision_recall_fscore_support(dataset.targets_all['group_idx'], pred_group_labels, average='macro') prf = ' '.join([f'{(m * 100):<.3f}' for m in macro_prf[:3]]) print(f' - macro: {prf}') weighted_prf = precision_recall_fscore_support(dataset.targets_all['group_idx'], pred_group_labels, average='weighted') prf = ' '.join([f'{(m * 100):<.3f}' for m in weighted_prf[:3]]) print(f' - weighted: {prf}') if save: save_path = join(save_dir, f'pred_groups_{save_name}.npy') with open(save_path, 'wb') as f: np.save(f, pred_group_labels) print(f'Saved group predictions to {save_path}') return (pred_group_labels, (micro_prf, macro_prf, weighted_prf))<|docstring|>Compute group labels given embeddings - Will also report precision, recall, f1-score for the groups (assuming best-assignment) - Note this requires referencing the true group labels, and then mapping arbitrary group indices generated by the clustering to an assignment that best matches these true group labels - But GDRO sees any group index assignment as the same. - There is no advantage with mapping the arbitrary group indices here<|endoftext|>
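The group-recovery scores in compute_group_labels depend on scipy's linear_sum_assignment, which pairs each predicted cluster with the true group it overlaps most by minimising the total normalised non-overlap cost. A toy cost matrix makes the mechanics easy to see (standalone sketch, invented numbers):

import numpy as np
from scipy.optimize import linear_sum_assignment

# rows = predicted clusters, columns = true groups, entries = 1 - overlap fraction
cost = np.array([[0.1, 0.9],
                 [0.8, 0.2]])
row_ind, col_ind = linear_sum_assignment(cost)
print(col_ind)                        # [0 1]: cluster 0 -> group 0, cluster 1 -> group 1
print(cost[row_ind, col_ind].sum())   # 0.1 + 0.2, the cheapest total assignment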
2fd98c51950d528409cee7bc5b734167c6579ac49f1ca95b10bf5753d7dbfc2b
def check_for_transition(self, pos, string, date_min, date_max, dt, check_time, check_ms): '\n Function that checks for transitions between years, months, days etc..\n adds an additional row of information below the tick labels if\n required, to indicate the year on each side of the transition.\n ' different_date = False different_time = False trim = 0 date_start = 0 time_start = 11 date_end = 10 time_end = 0 different_date = (date_min[:date_end] != date_max[:date_end]) if check_time: if check_ms: time_end = 23 else: time_end = 19 different_time = (date_min[time_start:time_end] != date_max[time_start:time_end]) if ((not different_date) and (not different_time)): trim = max(date_end, time_end) else: if (different_date and different_time): start = date_start end = time_end elif different_date: start = date_start end = date_end elif different_time: start = time_start end = time_end trim = (time_start - 1) if ((pos != 0) and (dt[start:end] not in self.indicators)): string += ('\n' + dt[start:end]) self.indicators.append(dt[start:end]) return (string, trim)
Function that checks for transitions between years, months, days etc.. adds an additional row of information below the tick labels if required, to indicate the year on each side of the transition.
python/src/scipp/plotting/formatters.py
check_for_transition
g5t/scipp
0
python
def check_for_transition(self, pos, string, date_min, date_max, dt, check_time, check_ms): '\n Function that checks for transitions between years, months, days etc..\n adds an additional row of information below the tick labels if\n required, to indicate the year on each side of the transition.\n ' different_date = False different_time = False trim = 0 date_start = 0 time_start = 11 date_end = 10 time_end = 0 different_date = (date_min[:date_end] != date_max[:date_end]) if check_time: if check_ms: time_end = 23 else: time_end = 19 different_time = (date_min[time_start:time_end] != date_max[time_start:time_end]) if ((not different_date) and (not different_time)): trim = max(date_end, time_end) else: if (different_date and different_time): start = date_start end = time_end elif different_date: start = date_start end = date_end elif different_time: start = time_start end = time_end trim = (time_start - 1) if ((pos != 0) and (dt[start:end] not in self.indicators)): string += ('\n' + dt[start:end]) self.indicators.append(dt[start:end]) return (string, trim)
def check_for_transition(self, pos, string, date_min, date_max, dt, check_time, check_ms): '\n Function that checks for transitions between years, months, days etc..\n adds an additional row of information below the tick labels if\n required, to indicate the year on each side of the transition.\n ' different_date = False different_time = False trim = 0 date_start = 0 time_start = 11 date_end = 10 time_end = 0 different_date = (date_min[:date_end] != date_max[:date_end]) if check_time: if check_ms: time_end = 23 else: time_end = 19 different_time = (date_min[time_start:time_end] != date_max[time_start:time_end]) if ((not different_date) and (not different_time)): trim = max(date_end, time_end) else: if (different_date and different_time): start = date_start end = time_end elif different_date: start = date_start end = date_end elif different_time: start = time_start end = time_end trim = (time_start - 1) if ((pos != 0) and (dt[start:end] not in self.indicators)): string += ('\n' + dt[start:end]) self.indicators.append(dt[start:end]) return (string, trim)<|docstring|>Function that checks for transitions between years, months, days etc.. adds an additional row of information below the tick labels if required, to indicate the year on each side of the transition.<|endoftext|>
f1b9263ba9b07414157639b50e7e1f585ceea3f802030ad76c355d428c6c0f80
def is_audio(self) -> dict: '\n Is this stream labelled as an audio stream?\n ' return (self.__dict__.get('codec_type', None) == 'audio')
Is this stream labelled as an audio stream?
ffprobe3/ffprobe.py
is_audio
PlantDaddy/ffprobe3
0
python
def is_audio(self) -> dict: '\n \n ' return (self.__dict__.get('codec_type', None) == 'audio')
def is_audio(self) -> dict: '\n \n ' return (self.__dict__.get('codec_type', None) == 'audio')<|docstring|>Is this stream labelled as an audio stream?<|endoftext|>
1e962f9e918c7017e817d9f0743847869e3107ce0be63ad26f3b4a44525e7c7e
def is_video(self) -> dict: '\n Is the stream labelled as a video stream.\n ' return (self.__dict__.get('codec_type', None) == 'video')
Is the stream labelled as a video stream.
ffprobe3/ffprobe.py
is_video
PlantDaddy/ffprobe3
0
python
def is_video(self) -> dict: '\n \n ' return (self.__dict__.get('codec_type', None) == 'video')
def is_video(self) -> dict: '\n \n ' return (self.__dict__.get('codec_type', None) == 'video')<|docstring|>Is the stream labelled as a video stream.<|endoftext|>
b7f8650d7df5aefdfa5f01b2731e5fa0bd8044be52ac34bb04d2ec53a3fed311
def is_subtitle(self) -> dict: '\n Is the stream labelled as a subtitle stream.\n ' return (self.__dict__.get('codec_type', None) == 'subtitle')
Is the stream labelled as a subtitle stream.
ffprobe3/ffprobe.py
is_subtitle
PlantDaddy/ffprobe3
0
python
def is_subtitle(self) -> dict: '\n \n ' return (self.__dict__.get('codec_type', None) == 'subtitle')
def is_subtitle(self) -> dict: '\n \n ' return (self.__dict__.get('codec_type', None) == 'subtitle')<|docstring|>Is the stream labelled as a subtitle stream.<|endoftext|>
b01fa7032ad0c9c24bff22b3dad4375dc3d652779754828f1984de522f40bf89
def is_attachment(self) -> dict: '\n Is the stream labelled as an attachment stream.\n ' return (self.__dict__.get('codec_type', None) == 'attachment')
Is the stream labelled as an attachment stream.
ffprobe3/ffprobe.py
is_attachment
PlantDaddy/ffprobe3
0
python
def is_attachment(self) -> dict: '\n \n ' return (self.__dict__.get('codec_type', None) == 'attachment')
def is_attachment(self) -> dict: '\n \n ' return (self.__dict__.get('codec_type', None) == 'attachment')<|docstring|>Is the stream labelled as a attachment stream.<|endoftext|>
afd123ba551da2a60f2f00103f68db6eecf48fa0829d9b258bccdc3f33010333
def frame_size(self) -> tuple: '\n Returns the pixel frame size as an integer tuple (width,height) if the stream is a video stream.\n Returns None if it is not a video stream.\n ' size = None if self.is_video(): width = self.__dict__['width'] height = self.__dict__['height'] if (width and height): try: size = (int(width), int(height)) except ValueError: raise FFProbeError('None integer size {}:{}'.format(width, height)) else: return None return size
Returns the pixel frame size as an integer tuple (width,height) if the stream is a video stream. Returns None if it is not a video stream.
ffprobe3/ffprobe.py
frame_size
PlantDaddy/ffprobe3
0
python
def frame_size(self) -> tuple: '\n Returns the pixel frame size as an integer tuple (width,height) if the stream is a video stream.\n Returns None if it is not a video stream.\n ' size = None if self.is_video(): width = self.__dict__['width'] height = self.__dict__['height'] if (width and height): try: size = (int(width), int(height)) except ValueError: raise FFProbeError('None integer size {}:{}'.format(width, height)) else: return None return size
def frame_size(self) -> tuple: '\n Returns the pixel frame size as an integer tuple (width,height) if the stream is a video stream.\n Returns None if it is not a video stream.\n ' size = None if self.is_video(): width = self.__dict__['width'] height = self.__dict__['height'] if (width and height): try: size = (int(width), int(height)) except ValueError: raise FFProbeError('None integer size {}:{}'.format(width, height)) else: return None return size<|docstring|>Returns the pixel frame size as an integer tuple (width,height) if the stream is a video stream. Returns None if it is not a video stream.<|endoftext|>
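The stream helpers in this file read keys such as codec_type, width, height and pix_fmt straight out of parsed ffprobe output. For orientation, a stream dictionary of that shape can be produced with the standard ffprobe CLI flags as sketched below; this is a standalone illustration, not the wrapper class used by this repository:

import json
import subprocess

def probe_streams(path):
    # -show_streams emits one entry per stream; -print_format json keeps the key
    # names identical to the attributes consumed by the helper methods above
    result = subprocess.run(
        ['ffprobe', '-v', 'quiet', '-print_format', 'json', '-show_streams', path],
        capture_output=True, text=True, check=True,
    )
    return json.loads(result.stdout)['streams']

# streams = probe_streams('sample.mp4')
# video = next(s for s in streams if s.get('codec_type') == 'video')
# print(video['width'], video['height'], video.get('pix_fmt'))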
06bc9850fa363475514aabf1b038af49fe9901bc3bb26dcc18ce0011c90100b5
def pixel_format(self) -> dict: '\n Returns a string representing the pixel format of the video stream. e.g. yuv420p.\n Returns None if it is not a video stream.\n ' return self.__dict__.get('pix_fmt', None)
Returns a string representing the pixel format of the video stream. e.g. yuv420p. Returns None if it is not a video stream.
ffprobe3/ffprobe.py
pixel_format
PlantDaddy/ffprobe3
0
python
def pixel_format(self) -> dict: '\n Returns a string representing the pixel format of the video stream. e.g. yuv420p.\n Returns None if it is not a video stream.\n ' return self.__dict__.get('pix_fmt', None)
def pixel_format(self) -> dict: '\n Returns a string representing the pixel format of the video stream. e.g. yuv420p.\n Returns None if it is not a video stream.\n ' return self.__dict__.get('pix_fmt', None)<|docstring|>Returns a string representing the pixel format of the video stream. e.g. yuv420p. Returns None if it is not a video stream.<|endoftext|>
cac974301786eab32f4a2d7376c8b180305336ea58740416361fabeb015f4d6e
def frames(self) -> int: '\n Returns the length of a video stream in frames. Returns 0 if not a video stream.\n ' if (self.is_video() or self.is_audio()): try: frame_count = int(self.__dict__.get('nb_frames', '')) except ValueError: raise FFProbeError('None integer frame count') else: frame_count = 0 return frame_count
Returns the length of a video stream in frames. Returns 0 if not a video stream.
ffprobe3/ffprobe.py
frames
PlantDaddy/ffprobe3
0
python
def frames(self) -> int: '\n \n ' if (self.is_video() or self.is_audio()): try: frame_count = int(self.__dict__.get('nb_frames', )) except ValueError: raise FFProbeError('None integer frame count') else: frame_count = 0 return frame_count
def frames(self) -> int: '\n \n ' if (self.is_video() or self.is_audio()): try: frame_count = int(self.__dict__.get('nb_frames', )) except ValueError: raise FFProbeError('None integer frame count') else: frame_count = 0 return frame_count<|docstring|>Returns the length of a video stream in frames. Returns 0 if not a video stream.<|endoftext|>
4c061862293c6b200079cb665bff1f4ffcbb75f92ae6f33f3792951aa35a04fb
def duration_seconds(self) -> int: '\n Returns the runtime duration of the video stream as a floating point number of seconds.\n Returns 0.0 if not a video stream.\n ' if (self.is_video() or self.is_audio()): try: duration = float(self.__dict__.get('duration', '')) except ValueError: raise FFProbeError('None numeric duration') else: duration = 0.0 return duration
Returns the runtime duration of the video stream as a floating point number of seconds. Returns 0.0 if not a video stream.
ffprobe3/ffprobe.py
duration_seconds
PlantDaddy/ffprobe3
0
python
def duration_seconds(self) -> int: '\n Returns the runtime duration of the video stream as a floating point number of seconds.\n Returns 0.0 if not a video stream.\n ' if (self.is_video() or self.is_audio()): try: duration = float(self.__dict__.get('duration', )) except ValueError: raise FFProbeError('None numeric duration') else: duration = 0.0 return duration
def duration_seconds(self) -> int: '\n Returns the runtime duration of the video stream as a floating point number of seconds.\n Returns 0.0 if not a video stream.\n ' if (self.is_video() or self.is_audio()): try: duration = float(self.__dict__.get('duration', )) except ValueError: raise FFProbeError('None numeric duration') else: duration = 0.0 return duration<|docstring|>Returns the runtime duration of the video stream as a floating point number of seconds. Returns 0.0 if not a video stream.<|endoftext|>
d52c93021e456ff46887b4eede675357bb2a268bb89e06be1a315aff5b52f728
def language(self) -> str: '\n Returns language tag of stream. e.g. eng\n ' return self.__dict__.get('TAG:language', None)
Returns language tag of stream. e.g. eng
ffprobe3/ffprobe.py
language
PlantDaddy/ffprobe3
0
python
def language(self) -> str: '\n \n ' return self.__dict__.get('TAG:language', None)
def language(self) -> str: '\n \n ' return self.__dict__.get('TAG:language', None)<|docstring|>Returns language tag of stream. e.g. eng<|endoftext|>
e55916f40f17027b1335ffc434a6a42be1e2a19dc44ecd1e92ad238450e18ac3
def codec(self) -> str: '\n Returns a string representation of the stream codec.\n ' return self.__dict__.get('codec_name', None)
Returns a string representation of the stream codec.
ffprobe3/ffprobe.py
codec
PlantDaddy/ffprobe3
0
python
def codec(self) -> str: '\n \n ' return self.__dict__.get('codec_name', None)
def codec(self) -> str: '\n \n ' return self.__dict__.get('codec_name', None)<|docstring|>Returns a string representation of the stream codec.<|endoftext|>
8c5ee3c01617870192baecdc3c9c84a64e163bcdf790b67eca16c0890aa3a784
def codec_description(self) -> str: '\n Returns a long representation of the stream codec.\n ' return self.__dict__.get('codec_long_name', None)
Returns a long representation of the stream codec.
ffprobe3/ffprobe.py
codec_description
PlantDaddy/ffprobe3
0
python
def codec_description(self) -> str: '\n \n ' return self.__dict__.get('codec_long_name', None)
def codec_description(self) -> str: '\n \n ' return self.__dict__.get('codec_long_name', None)<|docstring|>Returns a long representation of the stream codec.<|endoftext|>
5132146e54d5684879b5e0903d2bfddca746e14d69f9efd01f8e21fbf85f19fe
def codec_tag(self) -> str: '\n Returns a short representative tag of the stream codec.\n ' return self.__dict__.get('codec_tag_string', None)
Returns a short representative tag of the stream codec.
ffprobe3/ffprobe.py
codec_tag
PlantDaddy/ffprobe3
0
python
def codec_tag(self) -> str: '\n \n ' return self.__dict__.get('codec_tag_string', None)
def codec_tag(self) -> str: '\n \n ' return self.__dict__.get('codec_tag_string', None)<|docstring|>Returns a short representative tag of the stream codec.<|endoftext|>
77fc96ee60a54a38f53ea1952b89bcd793da1267d6445342273ac89470e98129
def bit_rate(self) -> int: '\n Returns bit_rate as an integer in bps\n ' try: return int(self.__dict__.get('bit_rate', '')) except ValueError: raise FFProbeError('None integer bit_rate')
Returns bit_rate as an integer in bps
ffprobe3/ffprobe.py
bit_rate
PlantDaddy/ffprobe3
0
python
def bit_rate(self) -> int: '\n \n ' try: return int(self.__dict__.get('bit_rate', )) except ValueError: raise FFProbeError('None integer bit_rate')
def bit_rate(self) -> int: '\n \n ' try: return int(self.__dict__.get('bit_rate', )) except ValueError: raise FFProbeError('None integer bit_rate')<|docstring|>Returns bit_rate as an integer in bps<|endoftext|>
c75be513d27ffe09cc74d8efa3bd20fcef8e6d56a8ce9025a0ee521d4b15fb4b
def container_bitrate(self) -> int: '\n Returns container_bitrate as an integer in bps\n ' try: return int(self.__dict__.get('container_bitrate', '')) except ValueError: raise FFProbeError('None integer container_bitrate')
Returns container_bitrate as an integer in bps
ffprobe3/ffprobe.py
container_bitrate
PlantDaddy/ffprobe3
0
python
def container_bitrate(self) -> int: '\n \n ' try: return int(self.__dict__.get('container_bitrate', )) except ValueError: raise FFProbeError('None integer container_bitrate')
def container_bitrate(self) -> int: '\n \n ' try: return int(self.__dict__.get('container_bitrate', )) except ValueError: raise FFProbeError('None integer container_bitrate')<|docstring|>Returns container_bitrate as an integer in bps<|endoftext|>
9601c5a0b24d6eaded97f09bb68e8779d4512121cb3421ce767c2fa9d4d81ca7
def test_create_file(self): 'Test the creation of a simple XlsxWriter file.' workbook = Workbook(self.got_filename) worksheet = workbook.add_worksheet() chart = workbook.add_chart({'type': 'line'}) date_format = workbook.add_format({'num_format': 14}) chart.axis_ids = [51761152, 51762688] worksheet.set_column('A:A', 12) dates = [date(2013, 1, 1), date(2013, 1, 2), date(2013, 1, 3), date(2013, 1, 4), date(2013, 1, 5), date(2013, 1, 6), date(2013, 1, 7), date(2013, 1, 8), date(2013, 1, 9), date(2013, 1, 10)] values = [10, 30, 20, 40, 20, 60, 50, 40, 30, 30] worksheet.write_column('A1', dates, date_format) worksheet.write_column('B1', values) chart.add_series({'categories': '=Sheet1!$A$1:$A$10', 'values': '=Sheet1!$B$1:$B$10'}) chart.set_x_axis({'date_axis': True, 'minor_unit': 1, 'major_unit': 1, 'num_format': 'dd/mm/yyyy', 'num_format_linked': True}) worksheet.insert_chart('E9', chart) workbook.close() self.assertExcelEqual()
Test the creation of a simple XlsxWriter file.
xlsxwriter/test/comparison/test_chart_date03.py
test_create_file
xubiuit/XlsxWriter
0
python
def test_create_file(self): workbook = Workbook(self.got_filename) worksheet = workbook.add_worksheet() chart = workbook.add_chart({'type': 'line'}) date_format = workbook.add_format({'num_format': 14}) chart.axis_ids = [51761152, 51762688] worksheet.set_column('A:A', 12) dates = [date(2013, 1, 1), date(2013, 1, 2), date(2013, 1, 3), date(2013, 1, 4), date(2013, 1, 5), date(2013, 1, 6), date(2013, 1, 7), date(2013, 1, 8), date(2013, 1, 9), date(2013, 1, 10)] values = [10, 30, 20, 40, 20, 60, 50, 40, 30, 30] worksheet.write_column('A1', dates, date_format) worksheet.write_column('B1', values) chart.add_series({'categories': '=Sheet1!$A$1:$A$10', 'values': '=Sheet1!$B$1:$B$10'}) chart.set_x_axis({'date_axis': True, 'minor_unit': 1, 'major_unit': 1, 'num_format': 'dd/mm/yyyy', 'num_format_linked': True}) worksheet.insert_chart('E9', chart) workbook.close() self.assertExcelEqual()
def test_create_file(self): workbook = Workbook(self.got_filename) worksheet = workbook.add_worksheet() chart = workbook.add_chart({'type': 'line'}) date_format = workbook.add_format({'num_format': 14}) chart.axis_ids = [51761152, 51762688] worksheet.set_column('A:A', 12) dates = [date(2013, 1, 1), date(2013, 1, 2), date(2013, 1, 3), date(2013, 1, 4), date(2013, 1, 5), date(2013, 1, 6), date(2013, 1, 7), date(2013, 1, 8), date(2013, 1, 9), date(2013, 1, 10)] values = [10, 30, 20, 40, 20, 60, 50, 40, 30, 30] worksheet.write_column('A1', dates, date_format) worksheet.write_column('B1', values) chart.add_series({'categories': '=Sheet1!$A$1:$A$10', 'values': '=Sheet1!$B$1:$B$10'}) chart.set_x_axis({'date_axis': True, 'minor_unit': 1, 'major_unit': 1, 'num_format': 'dd/mm/yyyy', 'num_format_linked': True}) worksheet.insert_chart('E9', chart) workbook.close() self.assertExcelEqual()<|docstring|>Test the creation of a simple XlsxWriter file.<|endoftext|>
14981517ab09248baa06042847c56674ce3859eac34192b17afad28ca4077bcf
def declare(*, group: Enum, version: int, event_types: List[int]) -> callable: '\n Declare class builder for a particular MOF group version and event types\n :param group: Event trace group\n :param version: version of mof class\n :param event_types: list of event type\n :return: cls\n ' def wrapper(cls): for event_type in event_types: if (group not in __mof_factory__.keys()): __mof_factory__[group] = {} if (version not in __mof_factory__[group].keys()): __mof_factory__[group][version] = {} __mof_factory__[group][version][event_type] = cls return cls return wrapper
Declare class builder for a particular MOF group version and event types :param group: Event trace group :param version: version of mof class :param event_types: list of event type :return: cls
etl/parsers/kernel/core.py
declare
IMULMUL/etl-parser
104
python
def declare(*, group: Enum, version: int, event_types: List[int]) -> callable: '\n Declare class builder for a particular MOF group version and event types\n :param group: Event trace group\n :param version: version of mof class\n :param event_types: list of event type\n :return: cls\n ' def wrapper(cls): for event_type in event_types: if (group not in __mof_factory__.keys()): __mof_factory__[group] = {} if (version not in __mof_factory__[group].keys()): __mof_factory__[group][version] = {} __mof_factory__[group][version][event_type] = cls return cls return wrapper
def declare(*, group: Enum, version: int, event_types: List[int]) -> callable: '\n Declare class builder for a particular MOF group version and event types\n :param group: Event trace group\n :param version: version of mof class\n :param event_types: list of event type\n :return: cls\n ' def wrapper(cls): for event_type in event_types: if (group not in __mof_factory__.keys()): __mof_factory__[group] = {} if (version not in __mof_factory__[group].keys()): __mof_factory__[group][version] = {} __mof_factory__[group][version][event_type] = cls return cls return wrapper<|docstring|>Declare class builder for a particular MOF group version and event types :param group: Event trace group :param version: version of mof class :param event_types: list of event type :return: cls<|endoftext|>
d04894c61b9a2ef1d128e10bb9e84f5a15b51f14e59453efab957d2d558f9799
def build_mof(group: Enum, version: int, event_type: int, mof_data: bytes) -> Mof: '\n The MOF factory\n :param group: event trace group\n :param version: event trace version of mof\n :param event_type: event type handle by mof structure\n :param mof_data: raw mof data\n :return:\n ' if (group not in __mof_factory__.keys()): raise GroupNotFound(group) if (version not in __mof_factory__[group].keys()): raise VersionNotFound(group, version) if (event_type not in __mof_factory__[group][version].keys()): raise EventTypeNotFound(group, version, event_type) return __mof_factory__[group][version][event_type](mof_data, event_type)
The MOF factory :param group: event trace group :param version: event trace version of mof :param event_type: event type handle by mof structure :param mof_data: raw mof data :return:
etl/parsers/kernel/core.py
build_mof
IMULMUL/etl-parser
104
python
def build_mof(group: Enum, version: int, event_type: int, mof_data: bytes) -> Mof: '\n The MOF factory\n :param group: event trace group\n :param version: event trace version of mof\n :param event_type: event type handle by mof structure\n :param mof_data: raw mof data\n :return:\n ' if (group not in __mof_factory__.keys()): raise GroupNotFound(group) if (version not in __mof_factory__[group].keys()): raise VersionNotFound(group, version) if (event_type not in __mof_factory__[group][version].keys()): raise EventTypeNotFound(group, version, event_type) return __mof_factory__[group][version][event_type](mof_data, event_type)
def build_mof(group: Enum, version: int, event_type: int, mof_data: bytes) -> Mof: '\n The MOF factory\n :param group: event trace group\n :param version: event trace version of mof\n :param event_type: event type handle by mof structure\n :param mof_data: raw mof data\n :return:\n ' if (group not in __mof_factory__.keys()): raise GroupNotFound(group) if (version not in __mof_factory__[group].keys()): raise VersionNotFound(group, version) if (event_type not in __mof_factory__[group][version].keys()): raise EventTypeNotFound(group, version, event_type) return __mof_factory__[group][version][event_type](mof_data, event_type)<|docstring|>The MOF factory :param group: event trace group :param version: event trace version of mof :param event_type: event type handle by mof structure :param mof_data: raw mof data :return:<|endoftext|>
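declare and build_mof form a registration-based factory: the decorator files a Mof subclass under its (group, version, event_type) keys in __mof_factory__, and build_mof later looks that class up and instantiates it with the raw payload. A minimal usage sketch, assuming the declare, Mof and build_mof definitions above are importable; the demo enum, event type value and payload bytes are invented for illustration:

from enum import Enum
from construct import Int32ul, Struct

class DemoGroup(Enum):   # stand-in for an event trace group enum
    Process = 1

@declare(group=DemoGroup.Process, version=3, event_types=[1])
class DemoProcessEvent(Mof):
    event_types = {1: 'Start'}           # consumed by get_event_definition()
    pattern = Struct('pid' / Int32ul)    # consumed by parse()

# dispatch a raw little-endian payload through the factory:
# event = build_mof(DemoGroup.Process, 3, 1, b'\x2a\x00\x00\x00')
# print(event.get_event_definition())           # 'Start'
# print(event.parse(b'\x2a\x00\x00\x00').pid)   # 42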
2a3b3c3e0756899a1b442fe7062e9a03bd6b582b8f915d32176ed2bb06fc2815
def parse(self, mof_data) -> Container: '\n Parse mof data stream\n :param mof_data: raw mof data\n :return: construct container\n ' return self.pattern.parse(mof_data)
Parse mof data stream :param mof_data: raw mof data :return: construct container
etl/parsers/kernel/core.py
parse
IMULMUL/etl-parser
104
python
def parse(self, mof_data) -> Container: '\n Parse mof data stream\n :param mof_data: raw mof data\n :return: construct container\n ' return self.pattern.parse(mof_data)
def parse(self, mof_data) -> Container: '\n Parse mof data stream\n :param mof_data: raw mof data\n :return: construct container\n ' return self.pattern.parse(mof_data)<|docstring|>Parse mof data stream :param mof_data: raw mof data :return: construct container<|endoftext|>
df7ae067ac9b32736bad009c329c04ce69a31902dd4c7cc1e9890fdd4f2e8d78
def get_event_definition(self): '\n :return: name of event definition\n ' return self.event_types[self.event_type]
:return: name of event definition
etl/parsers/kernel/core.py
get_event_definition
IMULMUL/etl-parser
104
python
def get_event_definition(self): '\n \n ' return self.event_types[self.event_type]
def get_event_definition(self): '\n \n ' return self.event_types[self.event_type]<|docstring|>:return: name of event definition<|endoftext|>
64ebf2271f3ec825e99dcc49980411fb2c0b70ca577ef367f9414a843ac78eba
def main(args): '\n Copy binaries.\n ' if (len(args) != 3): print('Usage copy_binaries.py Configuration destination') return conf = args[1] if (conf not in ['Debug', 'Release']): raise ValueError("Unknown configuration '{0}'".format(conf)) dest = args[2] if (not os.path.exists(dest)): os.makedirs(dest) folder = os.path.join('machinelearningext') names = {} (file, rep) = ([], []) for (r, d, f) in os.walk(folder): for a in f: full = os.path.join(r, a) ext = os.path.splitext(a)[(- 1)] if (ext not in {'.so', '.dll', '.pdb', '.json'}): continue if ('Scikit.ML' not in a): continue last_name = os.path.split(a)[(- 1)] if (last_name not in names): names[last_name] = full for (_, name) in sorted(names.items()): print("copy '{0}'".format(name)) shutil.copy(name, dest) shutil.copy(os.path.join('machinelearning', 'BuildToolsVersion.txt'), dest) shutil.copy(os.path.join('machinelearning', 'THIRD-PARTY-NOTICES.TXT'), dest)
Copy binaries.
copy_binaries.py
main
xadupre/machinelearningext
2
python
def main(args): '\n \n ' if (len(args) != 3): print('Usage copy_binaries.py Configuration destination') return conf = args[1] if (conf not in ['Debug', 'Release']): raise ValueError("Unknown configuration '{0}'".format(conf)) dest = args[2] if (not os.path.exists(dest)): os.makedirs(dest) folder = os.path.join('machinelearningext') names = {} (file, rep) = ([], []) for (r, d, f) in os.walk(folder): for a in f: full = os.path.join(r, a) ext = os.path.splitext(a)[(- 1)] if (ext not in {'.so', '.dll', '.pdb', '.json'}): continue if ('Scikit.ML' not in a): continue last_name = os.path.split(a)[(- 1)] if (last_name not in names): names[last_name] = full for (_, name) in sorted(names.items()): print("copy '{0}'".format(name)) shutil.copy(name, dest) shutil.copy(os.path.join('machinelearning', 'BuildToolsVersion.txt'), dest) shutil.copy(os.path.join('machinelearning', 'THIRD-PARTY-NOTICES.TXT'), dest)
def main(args): '\n \n ' if (len(args) != 3): print('Usage copy_binaries.py Configuration destination') return conf = args[1] if (conf not in ['Debug', 'Release']): raise ValueError("Unknown configuration '{0}'".format(conf)) dest = args[2] if (not os.path.exists(dest)): os.makedirs(dest) folder = os.path.join('machinelearningext') names = {} (file, rep) = ([], []) for (r, d, f) in os.walk(folder): for a in f: full = os.path.join(r, a) ext = os.path.splitext(a)[(- 1)] if (ext not in {'.so', '.dll', '.pdb', '.json'}): continue if ('Scikit.ML' not in a): continue last_name = os.path.split(a)[(- 1)] if (last_name not in names): names[last_name] = full for (_, name) in sorted(names.items()): print("copy '{0}'".format(name)) shutil.copy(name, dest) shutil.copy(os.path.join('machinelearning', 'BuildToolsVersion.txt'), dest) shutil.copy(os.path.join('machinelearning', 'THIRD-PARTY-NOTICES.TXT'), dest)<|docstring|>Copy binaires.<|endoftext|>
98116e3348b17a60b548438b05088289feec2429adbb44ed885ee4b44d9ea63b
def quote_xml(inStr): 'Escape markup chars, but do not modify CDATA sections.' if (not inStr): return '' s1 = ((isinstance(inStr, BaseStrType_) and inStr) or ('%s' % inStr)) s2 = '' pos = 0 matchobjects = CDATA_pattern_.finditer(s1) for mo in matchobjects: s3 = s1[pos:mo.start()] s2 += quote_xml_aux(s3) s2 += s1[mo.start():mo.end()] pos = mo.end() s3 = s1[pos:] s2 += quote_xml_aux(s3) return s2
Escape markup chars, but do not modify CDATA sections.
py-aramex/pyaramex/Location.py
quote_xml
QwadwoNyamekye/purplship-carriers
0
python
def quote_xml(inStr): if (not inStr): return s1 = ((isinstance(inStr, BaseStrType_) and inStr) or ('%s' % inStr)) s2 = pos = 0 matchobjects = CDATA_pattern_.finditer(s1) for mo in matchobjects: s3 = s1[pos:mo.start()] s2 += quote_xml_aux(s3) s2 += s1[mo.start():mo.end()] pos = mo.end() s3 = s1[pos:] s2 += quote_xml_aux(s3) return s2
def quote_xml(inStr): if (not inStr): return s1 = ((isinstance(inStr, BaseStrType_) and inStr) or ('%s' % inStr)) s2 = pos = 0 matchobjects = CDATA_pattern_.finditer(s1) for mo in matchobjects: s3 = s1[pos:mo.start()] s2 += quote_xml_aux(s3) s2 += s1[mo.start():mo.end()] pos = mo.end() s3 = s1[pos:] s2 += quote_xml_aux(s3) return s2<|docstring|>Escape markup chars, but do not modify CDATA sections.<|endoftext|>
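quote_xml escapes markup characters everywhere except inside CDATA sections, which are copied through untouched; CDATA_pattern_ and quote_xml_aux come from the surrounding generateDS boilerplate and are not shown here. A standalone sketch of the same idea with those helpers spelled out (the names and regex are illustrative assumptions, not the library's own definitions):

import re
from xml.sax.saxutils import escape

CDATA_RE = re.compile(r'<!\[CDATA\[.*?\]\]>', re.DOTALL)

def quote_xml_demo(text):
    out, pos = [], 0
    for match in CDATA_RE.finditer(text):
        out.append(escape(text[pos:match.start()]))   # escape the plain part
        out.append(match.group(0))                    # keep the CDATA block verbatim
        pos = match.end()
    out.append(escape(text[pos:]))
    return ''.join(out)

print(quote_xml_demo('a < b <![CDATA[raw & <kept>]]> c & d'))
# a &lt; b <![CDATA[raw & <kept>]]> c &amp; d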
efab9ef9540fad750fecd21fde7719917b4c1f7acb582970b987eb278626ff6a
def parseString(inString, silence=False): 'Parse a string, create the object tree, and export it.\n\n Arguments:\n - inString -- A string. This XML fragment should not start\n with an XML declaration containing an encoding.\n - silence -- A boolean. If False, export the object.\n Returns -- The root object in the tree.\n ' parser = None rootNode = parsexmlstring_(inString, parser) (rootTag, rootClass) = get_root_tag(rootNode) if (rootClass is None): rootTag = 'AddressValidationRequest' rootClass = AddressValidationRequest rootObj = rootClass.factory() rootObj.build(rootNode) if (not silence): sys.stdout.write('<?xml version="1.0" ?>\n') rootObj.export(sys.stdout, 0, name_=rootTag, namespacedef_='') return rootObj
Parse a string, create the object tree, and export it. Arguments: - inString -- A string. This XML fragment should not start with an XML declaration containing an encoding. - silence -- A boolean. If False, export the object. Returns -- The root object in the tree.
py-aramex/pyaramex/Location.py
parseString
QwadwoNyamekye/purplship-carriers
0
python
def parseString(inString, silence=False): 'Parse a string, create the object tree, and export it.\n\n Arguments:\n - inString -- A string. This XML fragment should not start\n with an XML declaration containing an encoding.\n - silence -- A boolean. If False, export the object.\n Returns -- The root object in the tree.\n ' parser = None rootNode = parsexmlstring_(inString, parser) (rootTag, rootClass) = get_root_tag(rootNode) if (rootClass is None): rootTag = 'AddressValidationRequest' rootClass = AddressValidationRequest rootObj = rootClass.factory() rootObj.build(rootNode) if (not silence): sys.stdout.write('<?xml version="1.0" ?>\n') rootObj.export(sys.stdout, 0, name_=rootTag, namespacedef_=) return rootObj
def parseString(inString, silence=False): 'Parse a string, create the object tree, and export it.\n\n Arguments:\n - inString -- A string. This XML fragment should not start\n with an XML declaration containing an encoding.\n - silence -- A boolean. If False, export the object.\n Returns -- The root object in the tree.\n ' parser = None rootNode = parsexmlstring_(inString, parser) (rootTag, rootClass) = get_root_tag(rootNode) if (rootClass is None): rootTag = 'AddressValidationRequest' rootClass = AddressValidationRequest rootObj = rootClass.factory() rootObj.build(rootNode) if (not silence): sys.stdout.write('<?xml version="1.0" ?>\n') rootObj.export(sys.stdout, 0, name_=rootTag, namespacedef_=) return rootObj<|docstring|>Parse a string, create the object tree, and export it. Arguments: - inString -- A string. This XML fragment should not start with an XML declaration containing an encoding. - silence -- A boolean. If False, export the object. Returns -- The root object in the tree.<|endoftext|>
a56c369e822917fccbc5c61681bec228acb801a18aebde1dfb3d4acbea498c58
def getSubclassFromModule_(module, class_): 'Get the subclass of a class from a specific module.' name = (class_.__name__ + 'Sub') if hasattr(module, name): return getattr(module, name) else: return None
Get the subclass of a class from a specific module.
py-aramex/pyaramex/Location.py
getSubclassFromModule_
QwadwoNyamekye/purplship-carriers
0
python
def getSubclassFromModule_(module, class_): name = (class_.__name__ + 'Sub') if hasattr(module, name): return getattr(module, name) else: return None
def getSubclassFromModule_(module, class_): name = (class_.__name__ + 'Sub') if hasattr(module, name): return getattr(module, name) else: return None<|docstring|>Get the subclass of a class from a specific module.<|endoftext|>
e6df1c5b18ecbef5c567ad5b7ea7f1f4c4a1a597dd4cf7ae1b7aecaf41879dbe
def parse(data): '\n Parse HCI ACL data\n\n References can be found here:\n * https://www.bluetooth.org/en-us/specification/adopted-specifications - Core specification 4.1\n ** [vol 2] Part E (Section 5) - HCI Data Formats\n ** [vol 2] Part E (Section 5.4) - Exchange of HCI-specific information\n\n ' hdr = ACL_HEADER() hdr.asbyte = struct.unpack('<I', data[:4])[0] handle = int(hdr.b.handle) pb = int(hdr.b.pb) bc = int(hdr.b.bc) length = int(hdr.b.length) return (handle, pb, bc, length, data[4:])
Parse HCI ACL data References can be found here: * https://www.bluetooth.org/en-us/specification/adopted-specifications - Core specification 4.1 ** [vol 2] Part E (Section 5) - HCI Data Formats ** [vol 2] Part E (Section 5.4) - Exchange of HCI-specific information
blesuite/replay/btsnoop/bt/hci_acl.py
parse
mdxs/BLESuite
198
python
def parse(data): '\n Parse HCI ACL data\n\n References can be found here:\n * https://www.bluetooth.org/en-us/specification/adopted-specifications - Core specification 4.1\n ** [vol 2] Part E (Section 5) - HCI Data Formats\n ** [vol 2] Part E (Section 5.4) - Exchange of HCI-specific information\n\n ' hdr = ACL_HEADER() hdr.asbyte = struct.unpack('<I', data[:4])[0] handle = int(hdr.b.handle) pb = int(hdr.b.pb) bc = int(hdr.b.bc) length = int(hdr.b.length) return (handle, pb, bc, length, data[4:])
def parse(data): '\n Parse HCI ACL data\n\n References can be found here:\n * https://www.bluetooth.org/en-us/specification/adopted-specifications - Core specification 4.1\n ** [vol 2] Part E (Section 5) - HCI Data Formats\n ** [vol 2] Part E (Section 5.4) - Exchange of HCI-specific information\n\n ' hdr = ACL_HEADER() hdr.asbyte = struct.unpack('<I', data[:4])[0] handle = int(hdr.b.handle) pb = int(hdr.b.pb) bc = int(hdr.b.bc) length = int(hdr.b.length) return (handle, pb, bc, length, data[4:])<|docstring|>Parse HCI ACL data References can be found here: * https://www.bluetooth.org/en-us/specification/adopted-specifications - Core specification 4.1 ** [vol 2] Part E (Section 5) - HCI Data Formats ** [vol 2] Part E (Section 5.4) - Exchange of HCI-specific information<|endoftext|>
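parse relies on a ctypes-style ACL_HEADER union defined elsewhere in btsnoop to split the first 32 bits of the packet. The layout it encodes (12-bit connection handle, 2-bit packet boundary flag, 2-bit broadcast flag, then a 16-bit data length) can also be recovered with plain bit masking, as in this standalone sketch:

import struct

def parse_acl_header(data):
    # low 16 bits: handle (12) | pb (2) | bc (2); high 16 bits: payload length
    word = struct.unpack('<I', data[:4])[0]
    handle = word & 0x0FFF
    pb = (word >> 12) & 0x3
    bc = (word >> 14) & 0x3
    length = (word >> 16) & 0xFFFF
    return handle, pb, bc, length, data[4:]

# handle 0x0040, packet boundary flag 0b10, 7-byte payload
hdr = struct.pack('<HH', 0x0040 | (0x2 << 12), 7)
print(parse_acl_header(hdr + b'\x00' * 7)[:4])   # (64, 2, 0, 7)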
4ea58993f2080ca012a357ffa84c0a0e8c479d20718a164862da3af44b6601b5
def pb_to_str(pb): '\n Return a string representing the packet boundary flag\n ' assert (pb in [0, 1, 2, 3]) return PB_FLAGS[pb]
Return a string representing the packet boundary flag
blesuite/replay/btsnoop/bt/hci_acl.py
pb_to_str
mdxs/BLESuite
198
python
def pb_to_str(pb): '\n \n ' assert (pb in [0, 1, 2, 3]) return PB_FLAGS[pb]
def pb_to_str(pb): '\n \n ' assert (pb in [0, 1, 2, 3]) return PB_FLAGS[pb]<|docstring|>Return a string representing the packet boundary flag<|endoftext|>
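PB_FLAGS itself does not appear in these records; a plausible table, paraphrasing the packet-boundary semantics from the Core Specification, might look like the following (the exact wording used by the module is an assumption).

# Assumed lookup table for pb_to_str; strings paraphrase the spec, not the original module.
PB_FLAGS = {
    0b00: 'First non-automatically-flushable packet',
    0b01: 'Continuing fragment',
    0b10: 'First automatically-flushable packet',
    0b11: 'Complete automatically-flushable L2CAP PDU',
}
print(PB_FLAGS[0b01])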
95825b90603e4def39badbac247ecdc6406af679f631f814d2fa4e648a9d1488
def __confirmOperation(this, operation_name: str, message: str=''): '\n Called to ask user to confirm the operation before performing.\n\n Must be called for dangerous create/delete/update operations.\n ' print('Do you really want to perform the operation: ', operation_name) print(message) print('Enter Y to continue or N to stop execution: ', end='') while True: keyboard_input = input() if (keyboard_input in ['Y', 'y']): return elif (keyboard_input in ['N', 'n']): raise OperationWasNotConfirmed else: print('Your input is not correct!\nPlease, press Y to confirm or N to stop execution: ', end='')
Called to ask user to confirm the operation before performing. Must be called for dangerous create/delete/update operations.
awshelper.py
__confirmOperation
andreyess/AWS-helper
0
python
def __confirmOperation(this, operation_name: str, message: str=''): '\n        Called to ask user to confirm the operation before performing.\n\n        Must be called for dangerous create/delete/update operations.\n        ' print('Do you really want to perform the operation: ', operation_name) print(message) print('Enter Y to continue or N to stop execution: ', end='') while True: keyboard_input = input() if (keyboard_input in ['Y', 'y']): return elif (keyboard_input in ['N', 'n']): raise OperationWasNotConfirmed else: print('Your input is not correct!\nPlease, press Y to confirm or N to stop execution: ', end='')
def __confirmOperation(this, operation_name: str, message: str=''): '\n        Called to ask user to confirm the operation before performing.\n\n        Must be called for dangerous create/delete/update operations.\n        ' print('Do you really want to perform the operation: ', operation_name) print(message) print('Enter Y to continue or N to stop execution: ', end='') while True: keyboard_input = input() if (keyboard_input in ['Y', 'y']): return elif (keyboard_input in ['N', 'n']): raise OperationWasNotConfirmed else: print('Your input is not correct!\nPlease, press Y to confirm or N to stop execution: ', end='')<|docstring|>Called to ask user to confirm the operation before performing. Must be called for dangerous create/delete/update operations.<|endoftext|>
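The control flow around such a confirmation gate is easiest to see in a self-contained sketch: the caller simply lets OperationWasNotConfirmed propagate (or catches it), so the dangerous step never runs when the user answers N. Names and prompts below are reimplemented for illustration and may differ from the original module.

class OperationWasNotConfirmed(Exception):
    pass

def confirm_operation(operation_name, answer):
    # 'answer' stands in for the interactive input() so the sketch runs unattended.
    if answer.lower() == 'y':
        return
    raise OperationWasNotConfirmed(operation_name)

try:
    confirm_operation('terminate all EC2 instances', 'n')
    print('performing operation ...')        # never reached when the answer is not Y
except OperationWasNotConfirmed as exc:
    print('stopped by user:', exc)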
eeed0613690a81aee3c0ccac07f8f1a52b0bd63e06f12425f3f32632c1c24d94
def _getBotoClient(this, clientName: str, region: str=None): '\n Generates boto3 client for appropriate AWS service passed by clientName parameter\n using aws_access_key_id and aws_secret_access_key passed by object constructor.\n ' if (clientName not in this._boto_clients_supported): raise AwsServiceClientNotSupported print('Get client : {}\naccess_key_id: {}\nsecret_key_id: {}'.format(clientName, ''.join((('*' * (len(this._aws_access_key_id) - 4)) + this._aws_access_key_id[(- 4):])), ''.join((('*' * (len(this._aws_secret_access_key) - 4)) + this._aws_secret_access_key[(- 4):])))) clientKwargs = {'aws_access_key_id': this._aws_access_key_id, 'aws_secret_access_key': this._aws_secret_access_key} if region: clientKwargs['region_name'] = region return boto3.client(clientName, **clientKwargs)
Generates boto3 client for appropriate AWS service passed by clientName parameter using aws_access_key_id and aws_secret_access_key passed by object constructor.
awshelper.py
_getBotoClient
andreyess/AWS-helper
0
python
def _getBotoClient(this, clientName: str, region: str=None): '\n        Generates boto3 client for appropriate AWS service passed by clientName parameter\n        using aws_access_key_id and aws_secret_access_key passed by object constructor.\n        ' if (clientName not in this._boto_clients_supported): raise AwsServiceClientNotSupported print('Get client : {}\naccess_key_id: {}\nsecret_key_id: {}'.format(clientName, ''.join((('*' * (len(this._aws_access_key_id) - 4)) + this._aws_access_key_id[(- 4):])), ''.join((('*' * (len(this._aws_secret_access_key) - 4)) + this._aws_secret_access_key[(- 4):])))) clientKwargs = {'aws_access_key_id': this._aws_access_key_id, 'aws_secret_access_key': this._aws_secret_access_key} if region: clientKwargs['region_name'] = region return boto3.client(clientName, **clientKwargs)
def _getBotoClient(this, clientName: str, region: str=None): '\n        Generates boto3 client for appropriate AWS service passed by clientName parameter\n        using aws_access_key_id and aws_secret_access_key passed by object constructor.\n        ' if (clientName not in this._boto_clients_supported): raise AwsServiceClientNotSupported print('Get client : {}\naccess_key_id: {}\nsecret_key_id: {}'.format(clientName, ''.join((('*' * (len(this._aws_access_key_id) - 4)) + this._aws_access_key_id[(- 4):])), ''.join((('*' * (len(this._aws_secret_access_key) - 4)) + this._aws_secret_access_key[(- 4):])))) clientKwargs = {'aws_access_key_id': this._aws_access_key_id, 'aws_secret_access_key': this._aws_secret_access_key} if region: clientKwargs['region_name'] = region return boto3.client(clientName, **clientKwargs)<|docstring|>Generates boto3 client for appropriate AWS service passed by clientName parameter using aws_access_key_id and aws_secret_access_key passed by object constructor.<|endoftext|>
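Assuming 'ec2' is in the helper's supported-client list, the method boils down to a keyword-argument call into boto3.client; a roughly equivalent standalone call is sketched below with placeholder credentials.

import boto3

# Standalone equivalent of _getBotoClient('ec2', region='us-east-1'); credentials are placeholders.
ec2 = boto3.client(
    'ec2',
    aws_access_key_id='AKIAPLACEHOLDER',
    aws_secret_access_key='placeholder-secret-key',
    region_name='us-east-1',   # only included when a region was passed in
)
print(ec2.meta.region_name)    # constructing the client makes no API call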