idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
60,600
def setDefaultFetcher(fetcher, wrap_exceptions=True):
    """Install *fetcher* as the module-wide default HTTP fetcher.

    Unless ``wrap_exceptions`` is False (or the fetcher is being
    cleared with None), the fetcher is wrapped so that fetch failures
    surface as this library's exception types.
    """
    global _default_fetcher
    if fetcher is not None and wrap_exceptions:
        _default_fetcher = ExceptionWrappingFetcher(fetcher)
    else:
        _default_fetcher = fetcher
Set the default fetcher
60,601
def usingCurl():
    """Report whether the current default HTTP fetcher is curl-based."""
    current = getDefaultFetcher()
    if isinstance(current, ExceptionWrappingFetcher):
        # Look through the exception-wrapping layer.
        current = current.fetcher
    return isinstance(current, CurlHTTPFetcher)
Whether the currently set HTTP fetcher is a Curl HTTP fetcher .
60,602
def fetch(self, url, body=None, headers=None):
    """Perform an HTTP request via httplib2 and return an HTTPResponse.

    POSTs when a body is given, otherwise GETs.  Only http/https URLs
    are accepted.
    """
    method = 'POST' if body else 'GET'
    if headers is None:
        headers = {}
    if not (url.startswith('http://') or url.startswith('https://')):
        raise ValueError('URL is not a HTTP URL: %r' % (url,))
    httplib2_response, content = self.httplib2.request(
        url, method, body=body, headers=headers)
    try:
        final_url = httplib2_response['content-location']
    except KeyError:
        # No content-location header: sanity-check that no redirect
        # chain exists and fall back to the request URL.
        assert not httplib2_response.previous
        assert httplib2_response.status != 200
        final_url = url
    return HTTPResponse(
        body=content,
        final_url=final_url,
        headers=dict(httplib2_response.items()),
        status=httplib2_response.status,
    )
Perform an HTTP request
60,603
def getServiceEndpoints(input_url, flt=None):
    """Perform the Yadis protocol on *input_url*.

    Returns (normalized_uri, endpoints), where endpoints is the list
    produced by applying *flt* to the discovered XRDS document.

    Raises DiscoveryFailure if the XRDS cannot be parsed.
    """
    result = discover(input_url)
    try:
        endpoints = applyFilter(result.normalized_uri,
                                result.response_text, flt)
    except XRDSError as err:
        # Fix: "except XRDSError, err" is Python-2-only syntax.
        raise DiscoveryFailure(str(err), None)
    return (result.normalized_uri, endpoints)
Perform the Yadis protocol on the input URL and return an iterable of resulting endpoint objects .
60,604
def applyFilter(normalized_uri, xrd_data, flt=None):
    """Run filter *flt* over every service element in the XRDS data.

    Returns the list of endpoint objects the filter produced.
    """
    service_filter = mkFilter(flt)
    tree = parseXRDS(xrd_data)
    endpoints = []
    for service_element in iterServices(tree):
        endpoints.extend(
            service_filter.getServiceEndpoints(normalized_uri,
                                               service_element))
    return endpoints
Generate an iterable of endpoint objects given this input data presumably from the result of performing the Yadis protocol .
60,605
def parseLinkAttrs(html):
    """Find all link tags in an HTML document.

    Returns a list of dicts, one per <link> tag, mapping attribute
    name to (entity-decoded) attribute value.
    """
    # Strip ignorable regions first (removed_re is a module-level
    # regex) so their contents cannot be mistaken for markup.
    stripped = removed_re.sub('', html)
    html_mo = html_find.search(stripped)
    if html_mo is None or html_mo.start('contents') == -1:
        return []
    start, end = html_mo.span('contents')
    head_mo = head_find.search(stripped, start, end)
    if head_mo is None or head_mo.start('contents') == -1:
        return []
    start, end = head_mo.span('contents')
    link_mos = link_find.finditer(stripped, head_mo.start(), head_mo.end())
    matches = []
    for link_mo in link_mos:
        # Skip past "<link" (5 characters) to the attribute region.
        start = link_mo.start() + 5
        link_attrs = {}
        for attr_mo in attr_find.finditer(stripped, start):
            if attr_mo.lastgroup == 'end_link':
                break
            attr_name, q_val, unq_val = attr_mo.group('attr_name',
                                                      'q_val', 'unq_val')
            # Decode HTML entities in the quoted or unquoted value.
            attr_val = ent_replace.sub(replaceEnt, unq_val or q_val)
            link_attrs[attr_name] = attr_val
        matches.append(link_attrs)
    return matches
Find all link tags in a string representing a HTML document and return a list of their attributes .
60,606
def relMatches(rel_attr, target_rel):
    """Return 1 if *target_rel* appears (case-insensitively) among the
    whitespace-separated tokens of *rel_attr*, else 0."""
    tokens = rel_attr.strip().split()
    if any(token.lower() == target_rel for token in tokens):
        return 1
    return 0
Does this target_rel appear in rel_attr?
60,607
def linkHasRel(link_attrs, target_rel):
    """Report whether this link tag carries *target_rel* as a relationship."""
    rel_value = link_attrs.get('rel')
    if not rel_value:
        # Preserve the original's falsy return (None or empty string).
        return rel_value
    return relMatches(rel_value, target_rel)
Does this link have target_rel as a relationship?
60,608
def findFirstHref(link_attrs_list, target_rel):
    """Return the href of the first link with *target_rel*, or None."""
    matches = findLinksRel(link_attrs_list, target_rel)
    if matches:
        return matches[0].get('href')
    return None
Return the value of the href attribute for the first link tag in the list that has target_rel as a relationship .
60,609
def importElementTree(module_names=None):
    """Find a working ElementTree implementation.

    Tries each module name in turn, keeping the first one that both
    imports and parses a trivial document.

    Raises ImportError when no candidate works.
    """
    if module_names is None:
        module_names = elementtree_modules
    for mod_name in module_names:
        try:
            ElementTree = __import__(mod_name, None, None, ['unused'])
        except ImportError:
            pass
        else:
            try:
                ElementTree.XML('<unused/>')
            except (SystemExit, MemoryError, AssertionError):
                raise
            except Exception:
                # Fix: the original %-interpolated two placeholders with a
                # single argument, raising TypeError while logging.  Use
                # logging's lazy formatting; logging.exception() appends
                # the traceback itself.  "except Exception" (instead of a
                # bare except) also lets KeyboardInterrupt propagate.
                logging.exception(
                    'Not using ElementTree library %r because it failed to '
                    'parse a trivial document', mod_name)
            else:
                return ElementTree
    raise ImportError('No ElementTree library found. '
                      'You may need to install one. '
                      'Tried importing %r' % (module_names,))
Find a working ElementTree implementation trying the standard places that such a thing might show up .
60,610
def getYadisXRD(xrd_tree):
    """Return the XRD element that should contain the Yadis services.

    This is the *last* XRD child; raises XRDSError when the tree has
    none.
    """
    last_xrd = None
    for candidate in xrd_tree.findall(xrd_tag):
        last_xrd = candidate
    if last_xrd is None:
        raise XRDSError('No XRD present in tree')
    return last_xrd
Return the XRD element that should contain the Yadis services
60,611
def getXRDExpiration(xrd_element, default=None):
    """Return the expiration datetime of this XRD element.

    Returns *default* when no Expires child is present.
    """
    expires_element = xrd_element.find(expires_tag)
    if expires_element is None:
        return default
    parsed = strptime(expires_element.text, "%Y-%m-%dT%H:%M:%SZ")
    return datetime(*parsed[0:6])
Return the expiration date of this XRD element or None if no expiration was specified .
60,612
def getCanonicalID(iname, xrd_tree):
    """Return the verified CanonicalID from this XRDS document, or None.

    Walks from the last (most specific) XRD up the chain, checking each
    CanonicalID against its parent authority; raises XRDSFraud on any
    mismatch.
    """
    xrds = xrd_tree.findall(xrd_tag)
    xrds.reverse()
    try:
        canonical_id = xri.XRI(xrds[0].findall(canonicalID_tag)[0].text)
    except IndexError:
        # No CanonicalID present at all.
        return None
    child_id = canonical_id.lower()
    for ancestor_xrd in xrds[1:]:
        parent_sought = child_id[:child_id.rindex('!')]
        parent = xri.XRI(ancestor_xrd.findtext(canonicalID_tag))
        if parent_sought != parent.lower():
            raise XRDSFraud("%r can not come from %s" % (child_id, parent))
        child_id = parent_sought
    root = xri.rootAuthority(iname)
    if not xri.providerIsAuthoritative(root, child_id):
        raise XRDSFraud("%r can not come from root %r" % (child_id, root))
    return canonical_id
Return the CanonicalID from this XRDS document .
60,613
def getPriorityStrict(element):
    """Return the non-negative integer priority of *element*.

    Elements without a priority attribute sort last (module-level Max);
    a negative value raises ValueError.
    """
    prio_str = element.get('priority')
    if prio_str is None:
        return Max
    prio_val = int(prio_str)
    if prio_val < 0:
        raise ValueError('Priority values must be non-negative integers')
    return prio_val
Get the priority of this element .
60,614
def makeKVPost(request_message, server_url):
    """POST *request_message* directly to an OpenID Provider.

    Returns the provider's reply parsed into a Message object.
    """
    response = fetchers.fetch(server_url,
                              body=request_message.toURLEncoded())
    return _httpResponseToMessage(response, server_url)
Make a Direct Request to an OpenID Provider and return the result as a Message object .
60,615
def _httpResponseToMessage(response, server_url):
    """Adapt a direct-request HTTP response into a Message.

    A 400 becomes ServerError; any other non-200/206 status raises
    HTTPFetchingError.
    """
    response_message = Message.fromKVForm(response.body)
    if response.status == 400:
        raise ServerError.fromMessage(response_message)
    if response.status not in (200, 206):
        fmt = 'bad status code from server %s: %s'
        error_message = fmt % (server_url, response.status)
        raise fetchers.HTTPFetchingError(error_message)
    return response_message
Adapt a POST response to a Message .
60,616
def complete(self, query, current_url):
    """Interpret the OP's response to an OpenID request.

    Looks up the pending endpoint stored in the session, delegates to
    the underlying consumer, then cleans up session state.
    """
    endpoint = self.session.get(self._token_key)
    message = Message.fromPostArgs(query)
    response = self.consumer.complete(message, endpoint, current_url)
    try:
        del self.session[self._token_key]
    except KeyError:
        pass
    finished = (response.status in ['success', 'cancel']
                and response.identity_url is not None)
    if finished:
        # Discovery state for this identifier is no longer needed.
        disco = Discovery(self.session, response.identity_url,
                          self.session_key_prefix)
        disco.cleanup(force=True)
    return response
Called to interpret the server's response to an OpenID request. It is called in step 4 of the flow described in the consumer overview.
60,617
def fromMessage(cls, message):
    """Build a ServerError from an OpenID error message.

    Extracts the error text and the optional error code.
    """
    error_text = message.getArg(OPENID_NS, 'error',
                                '<no error message supplied>')
    error_code = message.getArg(OPENID_NS, 'error_code')
    return cls(error_text, error_code, message)
Generate a ServerError instance extracting the error text and the error code from the message .
60,618
def begin(self, service_endpoint):
    """Create an AuthRequest for *service_endpoint*.

    Establishes an association first when a store is configured.
    """
    assoc = None if self.store is None \
        else self._getAssociation(service_endpoint)
    request = AuthRequest(service_endpoint, assoc)
    request.return_to_args[self.openid1_nonce_query_arg_name] = mkNonce()
    if request.message.isOpenID1():
        # OpenID 1 carries the claimed id back in the return_to URL.
        request.return_to_args[self.openid1_return_to_identifier_name] = \
            request.endpoint.claimed_id
    return request
Create an AuthRequest object for the specified service_endpoint . This method will create an association if necessary .
60,619
def complete(self, message, endpoint, return_to):
    """Dispatch an OpenID response message to its mode handler.

    Unknown modes fall through to _completeInvalid.
    """
    mode = message.getArg(OPENID_NS, 'mode', '<No mode set>')
    handler = getattr(self, '_complete_' + mode, self._completeInvalid)
    return handler(message, endpoint, return_to)
Process the OpenID message using the specified endpoint and return_to URL as context . This method will handle any OpenID message that is sent to the return_to URL .
60,620
def _checkSetupNeeded(self, message):
    """Raise SetupNeededError when an OpenID 1 id_res message is
    actually a checkid_immediate cancel (signalled via user_setup_url).
    """
    if not message.isOpenID1():
        return
    user_setup_url = message.getArg(OPENID1_NS, 'user_setup_url')
    if user_setup_url is not None:
        raise SetupNeededError(user_setup_url)
Check an id_res message to see if it is a checkid_immediate cancel response .
60,621
def _doIdRes(self, message, endpoint, return_to):
    """Handle an id_res response that is not an immediate-mode cancel.

    Verifies return_to, discovery information, signature and nonce,
    then builds a SuccessResponse carrying the signed fields.
    """
    self._idResCheckForFields(message)
    if not self._checkReturnTo(message, return_to):
        raise ProtocolError(
            "return_to does not match return URL. Expected %r, got %r"
            % (return_to, message.getArg(OPENID_NS, 'return_to')))
    # Discovery verification may replace the endpoint with the one
    # that actually matches the assertion.
    endpoint = self._verifyDiscoveryResults(message, endpoint)
    logging.info("Received id_res response from %s using association %s"
                 % (endpoint.server_url,
                    message.getArg(OPENID_NS, 'assoc_handle')))
    self._idResCheckSignature(message, endpoint.server_url)
    self._idResCheckNonce(message, endpoint)
    # no_default turns a missing "signed" list into a protocol error.
    signed_list_str = message.getArg(OPENID_NS, 'signed', no_default)
    signed_list = signed_list_str.split(',')
    signed_fields = ["openid." + s for s in signed_list]
    return SuccessResponse(endpoint, message, signed_fields)
Handle id_res responses that are not cancellations of immediate mode requests .
60,622
def _verifyReturnToArgs(query):
    """Verify that every parameter named in the return_to URL is
    present and identical in *query*, and that all bare response
    arguments appear in the return_to URL.

    Raises ProtocolError on any mismatch.
    """
    message = Message.fromPostArgs(query)
    return_to = message.getArg(OPENID_NS, 'return_to')
    if return_to is None:
        raise ProtocolError('Response has no return_to')
    parsed_url = urlparse(return_to)
    rt_query = parsed_url[4]  # the query component
    # NOTE(review): cgi.parse_qsl is deprecated (removed in 3.13);
    # urllib.parse.parse_qsl is the drop-in replacement.
    parsed_args = cgi.parse_qsl(rt_query)
    for rt_key, rt_value in parsed_args:
        try:
            value = query[rt_key]
            if rt_value != value:
                fmt = ("parameter %s value %r does not match "
                       "return_to's value %r")
                raise ProtocolError(fmt % (rt_key, value, rt_value))
        except KeyError:
            fmt = "return_to parameter %s absent from query %r"
            raise ProtocolError(fmt % (rt_key, query))
    bare_args = message.getArgs(BARE_NS)
    # Fix: items() instead of the Python-2-only iteritems().
    for pair in bare_args.items():
        if pair not in parsed_args:
            raise ProtocolError("Parameter %s not in return_to URL"
                                % (pair[0],))
Verify that the arguments in the return_to URL are present in this response .
60,623
def _verifyDiscoveryResults(self, resp_msg, endpoint=None):
    """Verify an assertion against discovered information, dispatching
    on the message's OpenID protocol version."""
    if resp_msg.getOpenIDNamespace() == OPENID2_NS:
        verify = self._verifyDiscoveryResultsOpenID2
    else:
        verify = self._verifyDiscoveryResultsOpenID1
    return verify(resp_msg, endpoint)
Extract the information from an OpenID assertion message and verify it against the original
60,624
def _verifyDiscoverySingle(self, endpoint, to_match):
    """Check that *endpoint* matches the information asserted in
    *to_match*.

    Raises TypeURIMismatch or ProtocolError on any mismatch.
    """
    for type_uri in to_match.type_uris:
        if not endpoint.usesExtension(type_uri):
            raise TypeURIMismatch(type_uri, endpoint)
    # Fragments are not significant for claimed-id comparison.
    defragged_claimed_id, _ = urldefrag(to_match.claimed_id)
    if defragged_claimed_id != endpoint.claimed_id:
        raise ProtocolError(
            'Claimed ID does not match (different subjects!), '
            'Expected %s, got %s'
            % (defragged_claimed_id, endpoint.claimed_id))
    if to_match.getLocalID() != endpoint.getLocalID():
        raise ProtocolError('local_id mismatch. Expected %s, got %s'
                            % (to_match.getLocalID(),
                               endpoint.getLocalID()))
    if to_match.server_url is None:
        # Fix: the original assert carried an empty tuple as its
        # message, which prints as "()" and explains nothing.
        assert to_match.preferredNamespace() == OPENID1_NS, (
            'Only OpenID 1 endpoints may omit the server URL')
    elif to_match.server_url != endpoint.server_url:
        raise ProtocolError('OP Endpoint mismatch. Expected %s, got %s'
                            % (to_match.server_url, endpoint.server_url))
Verify that the given endpoint matches the information extracted from the OpenID assertion and raise an exception if there is a mismatch .
60,625
def _discoverAndVerify(self, claimed_id, to_match_endpoints):
    """Re-discover *claimed_id* and return the discovered service that
    matches one of *to_match_endpoints*.

    Raises DiscoveryFailure when discovery yields nothing.
    """
    logging.info('Performing discovery on %s' % (claimed_id,))
    _, services = self._discover(claimed_id)
    if not services:
        raise DiscoveryFailure(
            'No OpenID information found at %s' % (claimed_id,), None)
    return self._verifyDiscoveredServices(claimed_id, services,
                                          to_match_endpoints)
Given an endpoint object created from the information in an OpenID response perform discovery and verify the discovery results returning the matching endpoint that is the result of doing that discovery .
60,626
def _processCheckAuthResponse(self, response, server_url):
    """Process a check_authentication reply, honouring any
    invalidate_handle request, and return whether it validated."""
    is_valid = response.getArg(OPENID_NS, 'is_valid', 'false')
    invalidate_handle = response.getArg(OPENID_NS, 'invalidate_handle')
    if invalidate_handle is not None:
        logging.info('Received "invalidate_handle" from server %s'
                     % (server_url,))
        if self.store is None:
            logging.error('Unexpectedly got invalidate_handle without '
                          'a store!')
        else:
            self.store.removeAssociation(server_url, invalidate_handle)
    if is_valid == 'true':
        return True
    logging.error('Server responds that checkAuth call is not valid')
    return False
Process the response message from a check_authentication request invalidating associations if requested .
60,627
def _getAssociation(self, endpoint):
    """Return a valid association for the endpoint's server_url.

    Negotiates (and stores) a fresh association when the stored one is
    missing or expired.
    """
    assoc = self.store.getAssociation(endpoint.server_url)
    if assoc is not None and assoc.expiresIn > 0:
        return assoc
    assoc = self._negotiateAssociation(endpoint)
    if assoc is not None:
        self.store.storeAssociation(endpoint.server_url, assoc)
    return assoc
Get an association for the endpoint's server_url.
60,628
def _extractSupportedAssociationType(self, server_error, endpoint,
                                     assoc_type):
    """Handle a ServerError from an association request.

    Returns a fallback (assoc_type, session_type) pair suggested by
    the server, or None when no acceptable fallback is available.
    """
    # Anything other than an OpenID 2 "unsupported-type" error leaves
    # nothing to negotiate.
    if server_error.error_code != 'unsupported-type' or \
            server_error.message.isOpenID1():
        logging.error(
            'Server error when requesting an association from %r: %s'
            % (endpoint.server_url, server_error.error_text))
        return None
    logging.error('Unsupported association type %s: %s'
                  % (assoc_type, server_error.error_text,))
    assoc_type = server_error.message.getArg(OPENID_NS, 'assoc_type')
    session_type = server_error.message.getArg(OPENID_NS, 'session_type')
    if assoc_type is None or session_type is None:
        logging.error('Server responded with unsupported association '
                      'session but did not supply a fallback.')
        return None
    elif not self.negotiator.isAllowed(assoc_type, session_type):
        fmt = ('Server sent unsupported session/association type: '
               'session_type=%s, assoc_type=%s')
        logging.error(fmt % (session_type, assoc_type))
        return None
    else:
        return assoc_type, session_type
Handle ServerErrors resulting from association requests .
60,629
def _requestAssociation(self, endpoint, assoc_type, session_type):
    """Make one association request against the endpoint's OP URL.

    Returns the established Association, or None when the request or
    response parsing fails (failures are logged, not raised).
    """
    assoc_session, args = self._createAssociateRequest(
        endpoint, assoc_type, session_type)
    try:
        response = self._makeKVPost(args, endpoint.server_url)
    except fetchers.HTTPFetchingError as why:
        # Fix: "except E, e" and indexing the exception (why[0]) are
        # Python-2-only; format the exception itself instead.
        logging.exception('openid.associate request failed: %s' % (why,))
        return None
    try:
        assoc = self._extractAssociation(response, assoc_session)
    except KeyError as why:
        logging.exception(
            'Missing required parameter in response from %s: %s'
            % (endpoint.server_url, why))
        return None
    except ProtocolError as why:
        logging.exception('Protocol error parsing response from %s: %s'
                          % (endpoint.server_url, why))
        return None
    return assoc
Make and process one association request to this endpoint s OP endpoint URL .
60,630
def _createAssociateRequest(self, endpoint, assoc_type, session_type):
    """Build an (assoc_session, Message) pair for an associate request."""
    session_type_class = self.session_types[session_type]
    assoc_session = session_type_class()
    args = {'mode': 'associate', 'assoc_type': assoc_type}
    compat = endpoint.compatibilityMode()
    if not compat:
        args['ns'] = OPENID2_NS
    # session_type is omitted only for OpenID 1 no-encryption sessions.
    if not compat or assoc_session.session_type != 'no-encryption':
        args['session_type'] = assoc_session.session_type
    args.update(assoc_session.getRequest())
    message = Message.fromOpenIDArgs(args)
    return assoc_session, message
Create an association request for the given assoc_type and session_type .
60,631
def _extractAssociation(self, assoc_response, assoc_session):
    """Extract an Association from an associate response.

    Validates the session and association types against the
    established session.  Raises ProtocolError on a malformed or
    mismatched response; KeyError surfaces (via no_default) for
    missing required fields.
    """
    assoc_type = assoc_response.getArg(OPENID_NS, 'assoc_type', no_default)
    assoc_handle = assoc_response.getArg(OPENID_NS, 'assoc_handle',
                                         no_default)
    expires_in_str = assoc_response.getArg(OPENID_NS, 'expires_in',
                                           no_default)
    try:
        expires_in = int(expires_in_str)
    except ValueError as why:
        # Fix: "except E, e" and why[0] are Python-2-only; exceptions
        # are not indexable in Python 3.
        raise ProtocolError('Invalid expires_in field: %s' % (why,))
    if assoc_response.isOpenID1():
        session_type = self._getOpenID1SessionType(assoc_response)
    else:
        session_type = assoc_response.getArg(OPENID2_NS, 'session_type',
                                             no_default)
    if assoc_session.session_type != session_type:
        if assoc_response.isOpenID1() and session_type == 'no-encryption':
            # OpenID 1 allows a plaintext fallback session.
            assoc_session = PlainTextConsumerSession()
        else:
            fmt = 'Session type mismatch. Expected %r, got %r'
            message = fmt % (assoc_session.session_type, session_type)
            raise ProtocolError(message)
    if assoc_type not in assoc_session.allowed_assoc_types:
        fmt = 'Unsupported assoc_type for session %s returned: %s'
        raise ProtocolError(fmt % (assoc_session.session_type, assoc_type))
    try:
        secret = assoc_session.extractSecret(assoc_response)
    except ValueError as why:
        fmt = 'Malformed response for %s session: %s'
        raise ProtocolError(fmt % (assoc_session.session_type, why))
    return Association.fromExpiresIn(expires_in, assoc_handle, secret,
                                     assoc_type)
Attempt to extract an association from the response given the association response message and the established association session .
60,632
def setAnonymous(self, is_anonymous):
    """Set whether this request should be made anonymously.

    OpenID 1 has no anonymous mode, so enabling anonymity on an
    OpenID 1 request raises ValueError.
    """
    if is_anonymous and self.message.isOpenID1():
        raise ValueError('OpenID 1 requests MUST include the '
                         'identifier in the request')
    self._anonymous = is_anonymous
Set whether this request should be made anonymously . If a request is anonymous the identifier will not be sent in the request . This is only useful if you are making another kind of request with an extension in this request .
60,633
def addExtensionArg(self, namespace, key, value):
    """Attach an extension argument to this authentication request by
    forwarding it onto the underlying message."""
    self.message.setArg(namespace, key, value)
Add an extension argument to this OpenID authentication request .
60,634
def redirectURL(self, realm, return_to=None, immediate=False):
    """Return a URL carrying the encoded OpenID request."""
    return self.getMessage(realm, return_to, immediate).toURL(
        self.endpoint.server_url)
Returns a URL with an encoded OpenID request .
60,635
def formMarkup(self, realm, return_to=None, immediate=False,
               form_tag_attrs=None):
    """Return HTML form markup that submits this request to the IDP."""
    return self.getMessage(realm, return_to, immediate).toFormMarkup(
        self.endpoint.server_url, form_tag_attrs)
Get html for a form to submit this request to the IDP .
60,636
def htmlMarkup(self, realm, return_to=None, immediate=False,
               form_tag_attrs=None):
    """Return an auto-submitting HTML page wrapping formMarkup()."""
    form = self.formMarkup(realm, return_to, immediate, form_tag_attrs)
    return oidutil.autoSubmitHTML(form)
Get an autosubmitting HTML page that submits this request to the IDP . This is just a wrapper for formMarkup .
60,637
def isSigned(self, ns_uri, ns_key):
    """Report whether (ns_uri, ns_key) was among the signed fields,
    independent of namespace aliasing."""
    signed_key = self.message.getKey(ns_uri, ns_key)
    return signed_key in self.signed_fields
Return whether a particular key is signed regardless of its namespace alias
60,638
def getSigned(self, ns_uri, ns_key, default=None):
    """Return the value of a signed field, or *default* when the
    field was not signed."""
    if not self.isSigned(ns_uri, ns_key):
        return default
    return self.message.getArg(ns_uri, ns_key, default)
Return the specified signed field if available otherwise return default
60,639
def getSignedNS(self, ns_uri):
    """Return all arguments in *ns_uri* as a dict, or None if any of
    them was not signed."""
    msg_args = self.message.getArgs(ns_uri)
    # Fix: plain iteration instead of the Python-2-only iterkeys().
    for key in msg_args:
        if not self.isSigned(ns_uri, key):
            logging.info("SuccessResponse.getSignedNS: (%s, %s) not signed."
                         % (ns_uri, key))
            return None
    return msg_args
Get signed arguments from the response message . Return a dict of all arguments in the specified namespace . If any of the arguments are not signed return None .
60,640
def extensionResponse(self, namespace_uri, require_signed):
    """Return response arguments in *namespace_uri*, restricted to
    signed arguments when require_signed is set."""
    if require_signed:
        return self.getSignedNS(namespace_uri)
    return self.message.getArgs(namespace_uri)
Return response arguments in the specified namespace .
60,641
def mkFilter(parts):
    """Coerce a filter-convertible value into a compound filter.

    None means "accept everything" (BasicServiceEndpoint); a value
    that is not iterable is wrapped in a one-element list.
    """
    if parts is None:
        parts = [BasicServiceEndpoint]
    try:
        part_list = list(parts)
    except TypeError:
        part_list = [parts]
    return mkCompoundFilter(part_list)
Convert a filter-convertible thing into a filter.
60,642
def getServiceEndpoints(self, yadis_url, service_element):
    """Build endpoints for every <URI> in *service_element* that
    survives this filter's functions."""
    endpoints = []
    for type_uris, uri, _ in expandService(service_element):
        candidate = BasicServiceEndpoint(yadis_url, type_uris, uri,
                                         service_element)
        accepted = self.applyFilters(candidate)
        if accepted is not None:
            endpoints.append(accepted)
    return endpoints
Returns an iterator of endpoint objects produced by the filter functions .
60,643
def applyFilters(self, endpoint):
    """Run the filter functions over *endpoint* until one accepts it.

    Returns the first non-None result, or None when every filter
    rejects the endpoint.
    """
    for transform in self.filter_functions:
        accepted = transform(endpoint)
        if accepted is not None:
            return accepted
    return None
Apply filter functions to an endpoint until one of them returns non - None .
60,644
def getServiceEndpoints(self, yadis_url, service_element):
    """Concatenate the endpoints produced by every subfilter."""
    collected = []
    for child in self.subfilters:
        collected += child.getServiceEndpoints(yadis_url, service_element)
    return collected
Generate all endpoint objects for all of the subfilters of this filter and return their concatenation .
60,645
def randomString(length, chrs=None):
    """Produce a string of *length* random bytes/characters.

    With chrs=None, returns raw random bytes from getBytes; otherwise
    returns characters drawn uniformly from *chrs*.
    """
    if chrs is None:
        return getBytes(length)
    n = len(chrs)
    # Fix: range() instead of the Python-2-only xrange(); a generator
    # keeps join() memory-light.
    return ''.join(chrs[randrange(n)] for _ in range(length))
Produce a string of length random bytes chosen from chrs .
60,646
def _hasher_first_run(self, preimage):
    """Lazily initialise the keccak256 hasher on first use.

    Loads the backend hasher, sanity-checks it against the known
    keccak256 digest of empty input, caches it on self.hasher so later
    calls skip this setup, and hashes *preimage*.
    """
    backend_hash = self._backend.keccak256
    # keccak256(b'') -- the well-known empty-input digest.
    assert backend_hash(b'') == b"\xc5\xd2F\x01\x86\xf7#<\x92~}\xb2\xdc\xc7\x03\xc0\xe5\x00\xb6S\xca\x82';\x7b\xfa\xd8\x04]\x85\xa4p"
    self.hasher = backend_hash
    return backend_hash(preimage)
Invoke the backend on - demand and check an expected hash result then replace this first run with the new hasher method . This is a bit of a hacky way to minimize overhead on hash calls after this first one .
60,647
def dirname(path: Optional[str]) -> Optional[str]:
    """None-tolerant os.path.dirname: None input yields None."""
    return None if path is None else os.path.dirname(path)
Returns the directory component of a pathname and None if the argument is None
60,648
def basename(path: Optional[str]) -> Optional[str]:
    """None-tolerant os.path.basename: None input yields None."""
    return None if path is None else os.path.basename(path)
Returns the final component of a pathname and None if the argument is None
60,649
def normpath(path: Optional[str]) -> Optional[str]:
    """None-tolerant os.path.normpath: None input yields None."""
    return None if path is None else os.path.normpath(path)
Normalizes the path returns None if the argument is None
60,650
def join_paths(path1: Optional[str], path2: Optional[str]) -> Optional[str]:
    """Join two paths; if either is None the result is None."""
    if path1 is None or path2 is None:
        return None
    return os.path.join(path1, path2)
Joins two paths if neither of them is None
60,651
def stasher(self):
    """A stashing context manager (generator form).

    Yields a ``stash()`` callable; the first invocation stashes any
    dirty working-tree changes, and on exit the stash is popped again
    if one was created.
    """
    # One-element lists act as mutable cells shared with the closure.
    stashed = [False]
    clean = [False]

    def stash():
        # Nothing to do on a clean tree; remember the result so the
        # dirtiness check runs at most once.
        if clean[0] or not self.repo.is_dirty(submodules=False):
            clean[0] = True
            return
        if stashed[0]:
            return
        if self.change_count > 1:
            message = 'stashing {0} changes'
        else:
            message = 'stashing {0} change'
        print(colored(message.format(self.change_count), 'magenta'))
        try:
            self._run('stash')
        except GitError as e:
            raise StashError(stderr=e.stderr, stdout=e.stdout)
        stashed[0] = True

    yield stash
    # Context exit: restore whatever we stashed.
    if stashed[0]:
        print(colored('unstashing', 'magenta'))
        try:
            self._run('stash', 'pop')
        except GitError as e:
            raise UnstashError(stderr=e.stderr, stdout=e.stdout)
A stashing contextmanager .
60,652
def checkout(self, branch_name):
    """Check out the local branch named *branch_name*.

    Wraps the underlying checkout failure in our CheckoutError.
    """
    try:
        branch = find(self.repo.branches, lambda b: b.name == branch_name)
        branch.checkout()
    except OrigCheckoutError as e:
        raise CheckoutError(branch_name, details=e)
Checkout a branch by name .
60,653
def rebase(self, target_branch):
    """Rebase the current branch onto *target_branch*.

    Honours the git-up.rebase.arguments config value; raises
    RebaseError when git rebase fails.
    """
    current_branch = self.repo.active_branch
    # Fix: "[self.config(...)] or []" always chose the one-element
    # list (a list containing None is truthy), so a missing config
    # value injected None into the git command line.
    extra = self.config('git-up.rebase.arguments')
    arguments = ([extra] if extra else []) + [target_branch.name]
    try:
        self._run('rebase', *arguments)
    except GitError as e:
        raise RebaseError(current_branch.name, target_branch.name,
                          **e.__dict__)
Rebase to target branch .
60,654
def push(self, *args, **kwargs):
    """Push commits to the remote, streaming git's output to stdout.

    Returns the accumulated (stripped) stdout; raises GitError when
    the push command exits non-zero.
    """
    stdout = six.b('')
    # as_process lets us relay git's output byte-by-byte as it runs.
    cmd = self.git.push(as_process=True, *args, **kwargs)
    while True:
        output = cmd.stdout.read(1)
        sys.stdout.write(output.decode('utf-8'))
        sys.stdout.flush()
        stdout += output
        if output == six.b(""):
            # EOF: git closed its stdout.
            break
    try:
        cmd.wait()
    except GitCommandError as error:
        message = "'{0}' returned exit status {1}".format(
            ' '.join(str(c) for c in error.command), error.status)
        raise GitError(message, stderr=error.stderr, stdout=stdout)
    return stdout.strip()
Push commits to remote
60,655
def change_count(self):
    """Number of changed tracked files in the working directory."""
    status = self.git.status(porcelain=True,
                             untracked_files='no').strip()
    if not status:
        return 0
    # git status --porcelain prints one line per changed file.
    return len(status.split('\n'))
The number of changes in the working directory .
60,656
def uniq(seq):
    """Return a copy of *seq* without duplicates, preserving order.

    Elements are considered duplicates when their str() forms match.
    """
    seen = set()
    result = []
    for item in seq:
        key = str(item)
        if key not in seen:
            seen.add(key)
            result.append(item)
    return result
Return a copy of seq without duplicates .
60,657
def current_version():
    """Get the current version number from setup.py.

    Temporarily monkey-patches setuptools.setup to capture the version
    keyword, then (re)imports the local setup module.
    """
    import importlib
    import setuptools
    version = [None]

    def monkey_setup(**settings):
        version[0] = settings['version']

    old_setup = setuptools.setup
    setuptools.setup = monkey_setup
    try:
        import setup
        # Fix: the bare reload() builtin is Python 2 only.
        importlib.reload(setup)
    finally:
        # Always restore the real setup(), even if importing fails.
        setuptools.setup = old_setup
    return version[0]
Get the current version number from setup . py
60,658
def run(version, quiet, no_fetch, push, **kwargs):
    """A nicer git pull: command-line entry point.

    Handles the --version/--quiet/--no-fetch/--push flags, then hands
    off to GitUp.run().
    """
    if version:
        if NO_DISTRIBUTE:
            print(colored('Please install \'git-up\' via pip in order to '
                          'get version information.', 'yellow'))
        else:
            GitUp(sparse=True).version_info()
        return
    if quiet:
        # Swallow all normal output.
        sys.stdout = StringIO()
    try:
        gitup = GitUp()
        if push is not None:
            gitup.settings['push.auto'] = push
        if no_fetch:
            gitup.should_fetch = False
    except GitError:
        # Abort with a non-zero exit code on git errors during setup.
        sys.exit(1)
    else:
        gitup.run()
A nicer git pull .
60,659
def run(self):
    """Run the whole git-up sequence: fetch, rebase, checks, push."""
    try:
        if self.should_fetch:
            self.fetch()
        self.rebase_all_branches()
        if self.with_bundler():
            self.check_bundler()
        if self.settings['push.auto']:
            self.push()
    except GitError as error:
        self.print_error(error)
        # Re-raise for the test suite; exit with an error status
        # otherwise.
        if self.testing:
            raise
        sys.exit(1)
Run all the git - up stuff .
60,660
def fetch(self):
    """Fetch recent refs from the remotes.

    Unless git-up.fetch.all is set to true, only remotes with locally
    existent branches are fetched; '.' (the local pseudo-remote) is
    skipped.
    """
    fetch_kwargs = {'multiple': True}
    fetch_args = []
    if self.is_prune():
        fetch_kwargs['prune'] = True
    if self.settings['fetch.all']:
        fetch_kwargs['all'] = True
    else:
        if '.' in self.remotes:
            self.remotes.remove('.')
        if not self.remotes:
            # Removing '.' may have left nothing to fetch.
            return
        fetch_args.append(self.remotes)
    try:
        self.git.fetch(*fetch_args, **fetch_kwargs)
    except GitError as error:
        error.message = "`git fetch` failed"
        raise error
Fetch the recent refs from the remotes . Unless git - up . fetch . all is set to true all remotes with locally existent branches will be fetched .
60,661
def log(self, branch, remote):
    """Invoke the configured rebase.log-hook for branch/remote.

    On Windows the shell hook string is rewritten into a temporary
    batch file; elsewhere it runs through the shell directly.
    """
    log_hook = self.settings['rebase.log-hook']
    if log_hook:
        if ON_WINDOWS:
            # Translate the POSIX-shell hook into batch syntax:
            # $1 -> %1, escape bare percent signs, split on ';'.
            log_hook = re.sub(r'\$(\d+)', r'%\1', log_hook)
            log_hook = re.sub(r'%(?!\d)', '%%', log_hook)
            log_hook = re.sub(r'; ?', r'\n', log_hook)
            # delete=False: the file must survive closing so the
            # subprocess can execute it; removed manually below.
            with NamedTemporaryFile(prefix='PyGitUp.', suffix='.bat',
                                    delete=False) as bat_file:
                bat_file.file.write(b'@echo off\n')
                bat_file.file.write(log_hook.encode('utf-8'))
            state = subprocess.call([bat_file.name, branch.name,
                                     remote.name])
            os.remove(bat_file.name)
        else:
            state = subprocess.call([log_hook, 'git-up', branch.name,
                                     remote.name], shell=True)
        if self.testing:
            assert state == 0, 'log_hook returned != 0'
Call a log - command if set by git - up . fetch . all .
60,662
def version_info(self):
    """Print the running git-up version and check PyPI for updates.

    The update check is skipped when updates.check is off and degrades
    gracefully on network or parsing errors.
    """
    package = pkg.get_distribution('git-up')
    local_version_str = package.version
    local_version = package.parsed_version
    print('GitUp version is: ' + colored('v' + local_version_str,
                                         'green'))
    if not self.settings['updates.check']:
        return
    print('Checking for updates...', end='')
    try:
        # Fetch the PyPI JSON metadata for the latest release.
        reader = codecs.getreader('utf-8')
        details = json.load(reader(urlopen(PYPI_URL)))
        online_version = details['info']['version']
    except (HTTPError, URLError, ValueError):
        # Treat an unreachable/unparsable index as "up to date".
        recent = True
    else:
        recent = local_version >= pkg.parse_version(online_version)
    if not recent:
        print('\rRecent version is: '
              + colored('v' + online_version, color='yellow',
                        attrs=['bold']))
        print('Run \'pip install -U git-up\' to get the update.')
    else:
        # Overwrite the progress message with blanks.
        sys.stdout.write('\r' + ' ' * 80 + '\n')
Tell what version we're running and whether it's up to date.
60,663
def load_config(self):
    """Load the settings dict from git config.

    Empty/missing values keep their defaults; 'true'/'false' strings
    become booleans; anything else is stored verbatim.
    """
    for key in self.settings:
        value = self.config(key)
        if value == '' or value is None:
            continue
        # Normalise boolean strings.  (The original also had a dead
        # "elif value: pass" branch here; removed.)
        if value.lower() == 'true':
            value = True
        elif value.lower() == 'false':
            value = False
        self.settings[key] = value
Load the configuration from git config .
60,664
def check_bundler(self):
    """Run the bundled check-bundler.rb script via ruby.

    Passes the bundler.* git-config flags (autoinstall/local/rbenv)
    through as positional arguments.
    """
    def get_config(name):
        # The ruby script only checks for the presence of each flag.
        return name if self.config('bundler.' + name) else ''

    from pkg_resources import Requirement, resource_filename
    relative_path = os.path.join('PyGitUp', 'check-bundler.rb')
    bundler_script = resource_filename(Requirement.parse('git-up'),
                                       relative_path)
    assert os.path.exists(bundler_script), 'check-bundler.rb doesn\'t ' \
        'exist!'
    return_value = subprocess.call(
        ['ruby', bundler_script, get_config('autoinstall'),
         get_config('local'), get_config('rbenv')])
    if self.testing:
        assert return_value == 0, 'Errors while executing check-bundler.rb'
Run the bundler check .
60,665
def opendocx(file):
    """Open a docx file and return its word/document.xml as an XML tree.

    *file* may be a path or a file-like object, as accepted by
    zipfile.ZipFile.
    """
    # Fix: the original never closed the ZipFile, leaking the handle.
    with zipfile.ZipFile(file) as archive:
        xmlcontent = archive.read('word/document.xml')
    return etree.fromstring(xmlcontent)
Open a docx file return a document XML tree
60,666
def makeelement ( tagname , tagtext = None , nsprefix = 'w' , attributes = None , attrnsprefix = None ) : namespacemap = None if isinstance ( nsprefix , list ) : namespacemap = { } for prefix in nsprefix : namespacemap [ prefix ] = nsprefixes [ prefix ] nsprefix = nsprefix [ 0 ] if nsprefix : namespace = '{%s}' % nsprefixes [ nsprefix ] else : namespace = '' newelement = etree . Element ( namespace + tagname , nsmap = namespacemap ) if attributes : if not attrnsprefix : if nsprefix == 'w' : attributenamespace = namespace else : attributenamespace = '' else : attributenamespace = '{' + nsprefixes [ attrnsprefix ] + '}' for tagattribute in attributes : newelement . set ( attributenamespace + tagattribute , attributes [ tagattribute ] ) if tagtext : newelement . text = tagtext return newelement
Create an element & return it
60,667
def heading ( headingtext , headinglevel , lang = 'en' ) : lmap = { 'en' : 'Heading' , 'it' : 'Titolo' } paragraph = makeelement ( 'p' ) pr = makeelement ( 'pPr' ) pStyle = makeelement ( 'pStyle' , attributes = { 'val' : lmap [ lang ] + str ( headinglevel ) } ) run = makeelement ( 'r' ) text = makeelement ( 't' , tagtext = headingtext ) pr . append ( pStyle ) run . append ( text ) paragraph . append ( pr ) paragraph . append ( run ) return paragraph
Make a new heading return the heading element
60,668
def clean ( document ) : newdocument = document for t in ( 't' , 'r' ) : rmlist = [ ] for element in newdocument . iter ( ) : if element . tag == '{%s}%s' % ( nsprefixes [ 'w' ] , t ) : if not element . text and not len ( element ) : rmlist . append ( element ) for element in rmlist : element . getparent ( ) . remove ( element ) return newdocument
Perform misc cleaning operations on documents . Returns cleaned document .
60,669
def findTypeParent ( element , tag ) : p = element while True : p = p . getparent ( ) if p . tag == tag : return p return None
Finds fist parent of element of the given type
60,670
def AdvSearch ( document , search , bs = 3 ) : searchre = re . compile ( search ) matches = [ ] searchels = [ ] for element in document . iter ( ) : if element . tag == '{%s}t' % nsprefixes [ 'w' ] : if element . text : searchels . append ( element ) if len ( searchels ) > bs : searchels . pop ( 0 ) found = False for l in range ( 1 , len ( searchels ) + 1 ) : if found : break for s in range ( len ( searchels ) ) : if found : break if s + l <= len ( searchels ) : e = range ( s , s + l ) txtsearch = '' for k in e : txtsearch += searchels [ k ] . text match = searchre . search ( txtsearch ) if match : matches . append ( match . group ( ) ) found = True return set ( matches )
Return set of all regex matches
60,671
def getdocumenttext ( document ) : paratextlist = [ ] paralist = [ ] for element in document . iter ( ) : if element . tag == '{' + nsprefixes [ 'w' ] + '}p' : paralist . append ( element ) for para in paralist : paratext = u'' for element in para . iter ( ) : if element . tag == '{' + nsprefixes [ 'w' ] + '}t' : if element . text : paratext = paratext + element . text elif element . tag == '{' + nsprefixes [ 'w' ] + '}tab' : paratext = paratext + '\t' if not len ( paratext ) == 0 : paratextlist . append ( paratext ) return paratextlist
Return the raw text of a document as a list of paragraphs .
60,672
def wordrelationships ( relationshiplist ) : relationships = etree . fromstring ( '<Relationships xmlns="http://schemas.openxmlformats.org/package/2006' '/relationships"></Relationships>' ) count = 0 for relationship in relationshiplist : rel_elm = makeelement ( 'Relationship' , nsprefix = None , attributes = { 'Id' : 'rId' + str ( count + 1 ) , 'Type' : relationship [ 0 ] , 'Target' : relationship [ 1 ] } ) relationships . append ( rel_elm ) count += 1 return relationships
Generate a Word relationships file
60,673
def savedocx ( document , coreprops , appprops , contenttypes , websettings , wordrelationships , output , imagefiledict = None ) : if imagefiledict is None : warn ( 'Using savedocx() without imagefiledict parameter will be deprec' 'ated in the future.' , PendingDeprecationWarning ) assert os . path . isdir ( template_dir ) docxfile = zipfile . ZipFile ( output , mode = 'w' , compression = zipfile . ZIP_DEFLATED ) prev_dir = os . path . abspath ( '.' ) os . chdir ( template_dir ) treesandfiles = { document : 'word/document.xml' , coreprops : 'docProps/core.xml' , appprops : 'docProps/app.xml' , contenttypes : '[Content_Types].xml' , websettings : 'word/webSettings.xml' , wordrelationships : 'word/_rels/document.xml.rels' } for tree in treesandfiles : log . info ( 'Saving: %s' % treesandfiles [ tree ] ) treestring = etree . tostring ( tree , pretty_print = True ) docxfile . writestr ( treesandfiles [ tree ] , treestring ) if imagefiledict is not None : for imagepath , picrelid in imagefiledict . items ( ) : archivename = 'word/media/%s_%s' % ( picrelid , basename ( imagepath ) ) log . info ( 'Saving: %s' , archivename ) docxfile . write ( imagepath , archivename ) files_to_ignore = [ '.DS_Store' ] for dirpath , dirnames , filenames in os . walk ( '.' ) : for filename in filenames : if filename in files_to_ignore : continue templatefile = join ( dirpath , filename ) archivename = templatefile [ 2 : ] log . info ( 'Saving: %s' , archivename ) docxfile . write ( templatefile , archivename ) log . info ( 'Saved new file to: %r' , output ) docxfile . close ( ) os . chdir ( prev_dir ) return
Save a modified document
60,674
def _depr ( fn , usage , stacklevel = 3 ) : warn ( '{0} is deprecated. Use {1} instead' . format ( fn , usage ) , stacklevel = stacklevel , category = DeprecationWarning )
Internal convenience function for deprecation warnings
60,675
def upsert ( self , key , value , cas = 0 , ttl = 0 , format = None , persist_to = 0 , replicate_to = 0 ) : return _Base . upsert ( self , key , value , cas = cas , ttl = ttl , format = format , persist_to = persist_to , replicate_to = replicate_to )
Unconditionally store the object in Couchbase .
60,676
def insert ( self , key , value , ttl = 0 , format = None , persist_to = 0 , replicate_to = 0 ) : return _Base . insert ( self , key , value , ttl = ttl , format = format , persist_to = persist_to , replicate_to = replicate_to )
Store an object in Couchbase unless it already exists .
60,677
def prepend ( self , key , value , cas = 0 , format = None , persist_to = 0 , replicate_to = 0 ) : return _Base . prepend ( self , key , value , cas = cas , format = format , persist_to = persist_to , replicate_to = replicate_to )
Prepend a string to an existing value in Couchbase .
60,678
def get ( self , key , ttl = 0 , quiet = None , replica = False , no_format = False ) : return _Base . get ( self , key , ttl = ttl , quiet = quiet , replica = replica , no_format = no_format )
Obtain an object stored in Couchbase by given key .
60,679
def touch ( self , key , ttl = 0 ) : return _Base . touch ( self , key , ttl = ttl )
Update a key s expiration time
60,680
def lock ( self , key , ttl = 0 ) : return _Base . lock ( self , key , ttl = ttl )
Lock and retrieve a key - value entry in Couchbase .
60,681
def unlock ( self , key , cas ) : return _Base . unlock ( self , key , cas = cas )
Unlock a Locked Key in Couchbase .
60,682
def remove ( self , key , cas = 0 , quiet = None , persist_to = 0 , replicate_to = 0 ) : return _Base . remove ( self , key , cas = cas , quiet = quiet , persist_to = persist_to , replicate_to = replicate_to )
Remove the key - value entry for a given key in Couchbase .
60,683
def counter ( self , key , delta = 1 , initial = None , ttl = 0 ) : return _Base . counter ( self , key , delta = delta , initial = initial , ttl = ttl )
Increment or decrement the numeric value of an item .
60,684
def mutate_in ( self , key , * specs , ** kwargs ) : sdflags = kwargs . pop ( '_sd_doc_flags' , 0 ) if kwargs . pop ( 'insert_doc' , False ) : sdflags |= _P . CMDSUBDOC_F_INSERT_DOC if kwargs . pop ( 'upsert_doc' , False ) : sdflags |= _P . CMDSUBDOC_F_UPSERT_DOC kwargs [ '_sd_doc_flags' ] = sdflags return super ( Bucket , self ) . mutate_in ( key , specs , ** kwargs )
Perform multiple atomic modifications within a document .
60,685
def lookup_in ( self , key , * specs , ** kwargs ) : return super ( Bucket , self ) . lookup_in ( { key : specs } , ** kwargs )
Atomically retrieve one or more paths from a document .
60,686
def retrieve_in ( self , key , * paths , ** kwargs ) : import couchbase . subdocument as SD return self . lookup_in ( key , * tuple ( SD . get ( x ) for x in paths ) , ** kwargs )
Atomically fetch one or more paths from a document .
60,687
def stats ( self , keys = None , keystats = False ) : if keys and not isinstance ( keys , ( tuple , list ) ) : keys = ( keys , ) return self . _stats ( keys , keystats = keystats )
Request server statistics .
60,688
def observe ( self , key , master_only = False ) : return _Base . observe ( self , key , master_only = master_only )
Return storage information for a key .
60,689
def endure ( self , key , persist_to = - 1 , replicate_to = - 1 , cas = 0 , check_removed = False , timeout = 5.0 , interval = 0.010 ) : kv = { key : cas } rvs = self . endure_multi ( keys = kv , persist_to = persist_to , replicate_to = replicate_to , check_removed = check_removed , timeout = timeout , interval = interval ) return rvs [ key ]
Wait until a key has been distributed to one or more nodes
60,690
def endure_multi ( self , keys , persist_to = - 1 , replicate_to = - 1 , timeout = 5.0 , interval = 0.010 , check_removed = False ) : return _Base . endure_multi ( self , keys , persist_to = persist_to , replicate_to = replicate_to , timeout = timeout , interval = interval , check_removed = check_removed )
Check durability requirements for multiple keys
60,691
def remove_multi ( self , kvs , quiet = None ) : return _Base . remove_multi ( self , kvs , quiet = quiet )
Remove multiple items from the cluster
60,692
def counter_multi ( self , kvs , initial = None , delta = 1 , ttl = 0 ) : return _Base . counter_multi ( self , kvs , initial = initial , delta = delta , ttl = ttl )
Perform counter operations on multiple items
60,693
def rget ( self , key , replica_index = None , quiet = None ) : if replica_index is not None : return _Base . _rgetix ( self , key , replica = replica_index , quiet = quiet ) else : return _Base . _rget ( self , key , quiet = quiet )
Get an item from a replica node
60,694
def query ( self , design , view , use_devmode = False , ** kwargs ) : design = self . _mk_devmode ( design , use_devmode ) itercls = kwargs . pop ( 'itercls' , View ) return itercls ( self , design , view , ** kwargs )
Query a pre - defined MapReduce view passing parameters .
60,695
def n1ql_query ( self , query , * args , ** kwargs ) : if not isinstance ( query , N1QLQuery ) : query = N1QLQuery ( query ) itercls = kwargs . pop ( 'itercls' , N1QLRequest ) return itercls ( query , self , * args , ** kwargs )
Execute a N1QL query .
60,696
def analytics_query ( self , query , host , * args , ** kwargs ) : if not isinstance ( query , AnalyticsQuery ) : query = AnalyticsQuery ( query , * args , ** kwargs ) else : query . update ( * args , ** kwargs ) return couchbase . analytics . gen_request ( query , host , self )
Execute an Analytics query .
60,697
def search ( self , index , query , ** kwargs ) : itercls = kwargs . pop ( 'itercls' , _FTS . SearchRequest ) iterargs = itercls . mk_kwargs ( kwargs ) params = kwargs . pop ( 'params' , _FTS . Params ( ** kwargs ) ) body = _FTS . make_search_body ( index , query , params ) return itercls ( body , self , ** iterargs )
Perform full - text searches
60,698
def is_ssl ( self ) : mode = self . _cntl ( op = _LCB . LCB_CNTL_SSL_MODE , value_type = 'int' ) return mode & _LCB . LCB_SSL_ENABLED != 0
Read - only boolean property indicating whether SSL is used for this connection .
60,699
def flush ( self ) : path = '/pools/default/buckets/{0}/controller/doFlush' path = path . format ( self . bucket ) return self . _http_request ( type = _LCB . LCB_HTTP_TYPE_MANAGEMENT , path = path , method = _LCB . LCB_HTTP_METHOD_POST )
Clears the bucket s contents .