idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
60,200
def create_mysql_oursql(username, password, host, port, database, **kwargs):
    """Create an Engine connected to a MySQL database via the oursql driver.

    Extra keyword arguments are forwarded to ``create_engine``.
    """
    uri = _create_mysql_oursql(username, password, host, port, database)
    return create_engine(uri, **kwargs)
create an engine connected to a mysql database using oursql .
60,201
def create_mysql_pymysql(username, password, host, port, database, **kwargs):
    """Create an Engine connected to a MySQL database via the pymysql driver.

    Extra keyword arguments are forwarded to ``create_engine``.
    """
    uri = _create_mysql_pymysql(username, password, host, port, database)
    return create_engine(uri, **kwargs)
create an engine connected to a mysql database using pymysql .
60,202
def create_mysql_cymysql(username, password, host, port, database, **kwargs):
    """Create an Engine connected to a MySQL database via the cymysql driver.

    Extra keyword arguments are forwarded to ``create_engine``.
    """
    uri = _create_mysql_cymysql(username, password, host, port, database)
    return create_engine(uri, **kwargs)
create an engine connected to a mysql database using cymysql .
60,203
def create_mssql_pyodbc(username, password, host, port, database, **kwargs):
    """Create an Engine connected to a MSSQL database via the pyodbc driver.

    Extra keyword arguments are forwarded to ``create_engine``.
    """
    uri = _create_mssql_pyodbc(username, password, host, port, database)
    return create_engine(uri, **kwargs)
create an engine connected to a mssql database using pyodbc .
60,204
def create_mssql_pymssql(username, password, host, port, database, **kwargs):
    """Create an Engine connected to a MSSQL database via the pymssql driver.

    Extra keyword arguments are forwarded to ``create_engine``.
    """
    uri = _create_mssql_pymssql(username, password, host, port, database)
    return create_engine(uri, **kwargs)
create an engine connected to a mssql database using pymssql .
60,205
def titleize(text):
    """Lower-case *text*, then capitalize the first letter of every
    space-separated word. Runs of spaces collapse to a single space;
    empty input is returned unchanged.
    """
    if not text:
        return text
    words = text.lower().split(" ")
    capitalized = [w[0].upper() + w[1:] for w in words if w]
    return " ".join(capitalized)
Capitalizes all the words and replaces some characters in the string to create a nicer looking title .
60,206
def grouper_list(l, n):
    """Yield successive chunks of *n* items from iterable *l*.

    The final chunk may be shorter than *n*; no fill value is used.
    """
    buf = []
    for element in l:
        buf.append(element)
        if len(buf) == n:
            yield buf
            buf = []
    if buf:
        yield buf
Evenly divide a list into fixed-length chunks; the final chunk may be shorter than the fixed length and is not padded with fill values.
60,207
def convert_query_to_sql_statement(query):
    """Convert an ORM Query object into an executable Core SQL statement."""
    statement = query._compile_context().statement
    statement.use_labels = False
    return statement
Convert a Query object created from orm query into executable sql statement .
60,208
def execute_query_return_result_proxy(query):
    """Execute an ORM query and return the raw ResultProxy.

    Mirrors SQLAlchemy's internal Query execution path: compile the query,
    disable label generation, autoflush the session when applicable, and
    execute the statement on a connection bound to the session.
    """
    context = query._compile_context()
    context.statement.use_labels = False
    # Flush pending session changes first, matching Query.__iter__ behavior.
    if query._autoflush and not query._populate_existing:
        query.session._autoflush()
    # close_with_result=True lets the connection close once the proxy is consumed.
    conn = query._get_bind_args(context, query._connection_from_session, close_with_result=True)
    return conn.execute(context.statement, query._params)
Execute a query yield result proxy .
60,209
def find_state(self, state, best_match=True, min_similarity=70):
    """Fuzzy-search a state, returning a list of 2-letter abbreviations.

    Exact 2-letter abbreviations are accepted directly; otherwise the full
    name is fuzzy-matched against the known state list.

    :raises ValueError: when nothing matches above *min_similarity*.
    """
    matches = []
    abbr = state.upper()
    if abbr in STATE_ABBR_SHORT_TO_LONG:
        matches.append(abbr)
    elif best_match:
        state_long, confidence = extractOne(state, self.state_list)
        if confidence >= min_similarity:
            matches.append(STATE_ABBR_LONG_TO_SHORT[state_long])
    else:
        for state_long, confidence in extract(state, self.state_list):
            if confidence >= min_similarity:
                matches.append(STATE_ABBR_LONG_TO_SHORT[state_long])
    if not matches:
        message = ("'%s' is not a valid state name, use 2 letter "
                   "short name or correct full name please.")
        raise ValueError(message % state)
    return matches
Fuzzy search correct state .
60,210
def find_city(self, city, state=None, best_match=True, min_similarity=70):
    """Fuzzy-search the correct city name.

    :param city: city name to search for (may be misspelled).
    :param state: optional state filter (fuzzy-resolved via find_state).
    :param best_match: if True return only the single best match.
    :param min_similarity: minimum fuzzy-match score (0-100).
    :raises ValueError: when no city matches above *min_similarity*.

    BUGFIX: the original rebound the ``city`` parameter to the fuzzy-matched
    candidate, so the "not a valid city name" error reported the candidate
    instead of the caller's input.
    """
    if state:
        state_sort = self.find_state(state, best_match=True)[0]
        city_pool = self.state_to_city_mapper[state_sort.upper()]
    else:
        city_pool = self.city_list

    result_city_list = []
    if best_match:
        matched_city, confidence = extractOne(city, city_pool)
        if confidence >= min_similarity:
            result_city_list.append(matched_city)
    else:
        for matched_city, confidence in extract(city, city_pool):
            if confidence >= min_similarity:
                result_city_list.append(matched_city)

    if len(result_city_list) == 0:
        raise ValueError("'%s' is not a valid city name" % city)
    return result_city_list
Fuzzy search correct city .
60,211
def _resolve_sort_by(sort_by, flag_radius_query):
    """Normalize the ``sort_by`` argument into a plain column name.

    Accepts None, a string, or a Zipcode column attribute. 'dist' is only
    valid (and is the default) for distance-based queries.

    :raises ValueError: for 'dist' outside a radius query, or for an
        unknown column attribute.
    """
    if sort_by is None:
        return SORT_BY_DIST if flag_radius_query else None
    if isinstance(sort_by, string_types):
        if sort_by.lower() != SORT_BY_DIST:
            return sort_by
        if flag_radius_query is False:
            raise ValueError(
                "`sort_by` arg can be 'dist' only under distance based query!")
        return SORT_BY_DIST
    if sort_by in SimpleZipcode.__table__.columns:
        return sort_by.name
    raise ValueError(
        "`sort_by` arg has to be one of the Zipcode attribute or 'dist'!")
Result sort_by argument .
60,212
def by_zipcode(self, zipcode, zipcode_type=None, zero_padding=True):
    """Look up a single zipcode by its exact value.

    The zipcode may be given as int or str; with ``zero_padding`` it is
    left-padded with zeros to 5 digits. Returns an empty zipcode object
    when nothing matches.
    """
    zipcode = str(zipcode).zfill(5) if zero_padding else str(zipcode)
    res = self.query(
        zipcode=zipcode,
        sort_by=None,
        returns=1,
        zipcode_type=zipcode_type,
    )
    return res[0] if len(res) else self.zip_klass()
Search zipcode by exact 5-digit zipcode. The input is zero-padded automatically, so callers do not need to pad it themselves.
60,213
def by_prefix(self, prefix, zipcode_type=ZipcodeType.Standard, sort_by=SimpleZipcode.zipcode.name, ascending=True, returns=DEFAULT_LIMIT):
    """Search zipcode information by the first N digits of the zipcode."""
    criteria = dict(prefix=prefix, sort_by=sort_by, zipcode_type=zipcode_type,
                    ascending=ascending, returns=returns)
    return self.query(**criteria)
Search zipcode information by first N digits .
60,214
def by_pattern(self, pattern, zipcode_type=ZipcodeType.Standard, sort_by=SimpleZipcode.zipcode.name, ascending=True, returns=DEFAULT_LIMIT):
    """Search zipcode information by a wildcard pattern."""
    criteria = dict(pattern=pattern, sort_by=sort_by, zipcode_type=zipcode_type,
                    ascending=ascending, returns=returns)
    return self.query(**criteria)
Search zipcode by wildcard .
60,215
def by_state(self, state, zipcode_type=ZipcodeType.Standard, sort_by=SimpleZipcode.zipcode.name, ascending=True, returns=DEFAULT_LIMIT):
    """Search zipcode information by (fuzzy-matched) state name."""
    criteria = dict(state=state, sort_by=sort_by, zipcode_type=zipcode_type,
                    ascending=ascending, returns=returns)
    return self.query(**criteria)
Search zipcode information by fuzzy State name .
60,216
def by_coordinates(self, lat, lng, radius=25.0, zipcode_type=ZipcodeType.Standard, sort_by=SORT_BY_DIST, ascending=True, returns=DEFAULT_LIMIT):
    """Search zipcode information near a (lat, lng) coordinate within *radius*."""
    criteria = dict(lat=lat, lng=lng, radius=radius, sort_by=sort_by,
                    zipcode_type=zipcode_type, ascending=ascending, returns=returns)
    return self.query(**criteria)
Search zipcode information near a coordinates on a map .
60,217
def by_population(self, lower=-1, upper=2 ** 31, zipcode_type=ZipcodeType.Standard, sort_by=SimpleZipcode.population.name, ascending=False, returns=DEFAULT_LIMIT):
    """Search zipcode information by population range."""
    criteria = dict(population_lower=lower, population_upper=upper,
                    sort_by=sort_by, zipcode_type=zipcode_type,
                    ascending=ascending, returns=returns)
    return self.query(**criteria)
Search zipcode information by population range .
60,218
def by_population_density(self, lower=-1, upper=2 ** 31, zipcode_type=ZipcodeType.Standard, sort_by=SimpleZipcode.population_density.name, ascending=False, returns=DEFAULT_LIMIT):
    """Search zipcode information by population-density range."""
    criteria = dict(population_density_lower=lower, population_density_upper=upper,
                    sort_by=sort_by, zipcode_type=zipcode_type,
                    ascending=ascending, returns=returns)
    return self.query(**criteria)
Search zipcode information by population density range .
60,219
def by_housing_units(self, lower=-1, upper=2 ** 31, zipcode_type=ZipcodeType.Standard, sort_by=SimpleZipcode.housing_units.name, ascending=False, returns=DEFAULT_LIMIT):
    """Search zipcode information by number of housing units."""
    criteria = dict(housing_units_lower=lower, housing_units_upper=upper,
                    sort_by=sort_by, zipcode_type=zipcode_type,
                    ascending=ascending, returns=returns)
    return self.query(**criteria)
Search zipcode information by number of housing units.
60,220
def by_occupied_housing_units(self, lower=-1, upper=2 ** 31, zipcode_type=ZipcodeType.Standard, sort_by=SimpleZipcode.occupied_housing_units.name, ascending=False, returns=DEFAULT_LIMIT):
    """Search zipcode information by number of occupied housing units."""
    criteria = dict(occupied_housing_units_lower=lower,
                    occupied_housing_units_upper=upper,
                    sort_by=sort_by, zipcode_type=zipcode_type,
                    ascending=ascending, returns=returns)
    return self.query(**criteria)
Search zipcode information by number of occupied housing units.
60,221
def by_median_home_value(self, lower=-1, upper=2 ** 31, zipcode_type=ZipcodeType.Standard, sort_by=SimpleZipcode.median_home_value.name, ascending=False, returns=DEFAULT_LIMIT):
    """Search zipcode information by median home value range."""
    criteria = dict(median_home_value_lower=lower, median_home_value_upper=upper,
                    sort_by=sort_by, zipcode_type=zipcode_type,
                    ascending=ascending, returns=returns)
    return self.query(**criteria)
Search zipcode information by median home value .
60,222
def by_median_household_income(self, lower=-1, upper=2 ** 31, zipcode_type=ZipcodeType.Standard, sort_by=SimpleZipcode.median_household_income.name, ascending=False, returns=DEFAULT_LIMIT):
    """Search zipcode information by median household income range."""
    criteria = dict(median_household_income_lower=lower,
                    median_household_income_upper=upper,
                    sort_by=sort_by, zipcode_type=zipcode_type,
                    ascending=ascending, returns=returns)
    return self.query(**criteria)
Search zipcode information by median household income .
60,223
def select_single_column(engine, column):
    """Select all values of one column.

    :returns: (column_name, list_of_values)
    """
    stmt = select([column])
    values = [row[0] for row in engine.execute(stmt)]
    return column.name, values
Select data from single column .
60,224
def select_many_column(engine, *columns):
    """Select data from multiple columns.

    Columns may be passed individually or as a single list/tuple.

    :returns: (headers, list_of_row_tuples)
    """
    if isinstance(columns[0], (list, tuple)):
        columns = columns[0]
    elif isinstance(columns[0], Column):
        pass
    stmt = select(columns)
    headers = [str(c) for c in columns]
    rows = [tuple(r) for r in engine.execute(stmt)]
    return headers, rows
Select data from multiple columns .
60,225
def select_random(engine, table_or_columns, limit=5):
    """Fetch up to *limit* rows in random order."""
    stmt = select(table_or_columns).order_by(func.random()).limit(limit)
    return engine.execute(stmt).fetchall()
Randomly select some rows from table .
60,226
def smart_insert(engine, table, data, minimal_size=5):
    """An optimized insert strategy: guarantees successful insertion at the
    highest practical speed.

    Tries a single bulk insert first; on IntegrityError the batch is
    recursively split into ~sqrt(n)-sized chunks, falling back to
    row-by-row inserts (skipping duplicates) once a chunk is small enough.

    WARNING: atomic write is NOT ensured if the program is interrupted.
    """
    insert = table.insert()
    if isinstance(data, list):
        try:
            # Fast path: bulk-insert the whole batch at once.
            engine.execute(insert, data)
        except IntegrityError:
            n = len(data)
            if n >= minimal_size ** 2:
                # Still large: split into sqrt(n)-sized chunks and recurse.
                n_chunk = math.floor(math.sqrt(n))
                for chunk in grouper_list(data, n_chunk):
                    smart_insert(engine, table, chunk, minimal_size)
            else:
                # Small batch: insert row by row, silently skipping duplicates.
                for row in data:
                    try:
                        engine.execute(insert, row)
                    except IntegrityError:
                        pass
    else:
        # Single-row input: best-effort insert, ignore duplicates.
        try:
            engine.execute(insert, data)
        except IntegrityError:
            pass
An optimized Insert strategy . Guarantee successful and highest insertion speed . But ATOMIC WRITE IS NOT ENSURED IF THE PROGRAM IS INTERRUPTED .
60,227
def load_keys():
    """Load Twitter API credentials from environment variables.

    :returns: (consumer_key, consumer_secret, access_token,
        access_token_secret); entries are None when unset.
    """
    names = ('CONSUMER_KEY', 'CONSUMER_SECRET',
             'ACCESS_TOKEN', 'ACCESS_TOKEN_SECRET')
    return tuple(os.environ.get(name) for name in names)
Loads Twitter keys .
60,228
def search(self, q):
    """Search tweets matching keyword *q* via the underlying API client."""
    return self._api.search(q=q)
Search tweets by keyword .
60,229
def search_by_user(self, screen_name, count=100):
    """Return up to *count* tweets from *screen_name*'s timeline."""
    return self._api.user_timeline(screen_name=screen_name, count=count)
Search tweets by user .
60,230
def on_successful_login(self, subject, authc_token, account_id):
    """React to a successful login: always forget any previously stored
    identity first, then remember the new identity only when the token
    requests RememberMe."""
    self.forget_identity(subject)
    if not authc_token.is_remember_me:
        msg = ("AuthenticationToken did not indicate that RememberMe is "
               "requested. RememberMe functionality will not be executed "
               "for corresponding account.")
        logger.debug(msg)
        return
    self.remember_identity(subject, authc_token, account_id)
Reacts to the successful login attempt by first always forgetting any previously stored identity . Then if the authc_token is a RememberMe type of token the associated identity will be remembered for later retrieval during a new user session .
60,231
def remember_identity(self, subject, authc_token, account_id):
    """Remember the subject's identity for later retrieval.

    Consolidates the Java-style overloaded rememberIdentity into one
    method using identifier-else-account resolution.

    :raises AttributeError: when neither an identifier nor an account_id
        can be resolved.
    """
    try:
        identifiers = self.get_identity_to_remember(subject, account_id)
    except AttributeError:
        raise AttributeError("Neither account_id nor identifier arguments passed")
    encrypted = self.convert_identifiers_to_bytes(identifiers)
    self.remember_encrypted_identity(subject, encrypted)
Yosai consolidates rememberIdentity (an overloaded method in Java) into a single method that uses identifier-else-account logic.
60,232
def convert_bytes_to_identifiers(self, encrypted, subject_context):
    """Decrypt *encrypted* and deserialize the result back into identifiers.

    *subject_context* is accepted for interface compatibility but unused.
    """
    raw = self.decrypt(encrypted)
    return self.serialization_manager.deserialize(raw)
If a cipher_service is available it will be used to first decrypt the serialized message . Then the bytes are deserialized and returned .
60,233
def encrypt(self, serialized):
    """Encrypt *serialized* bytes with Fernet using encryption_cipher_key."""
    return Fernet(self.encryption_cipher_key).encrypt(serialized)
Encrypts the serialized message using Fernet
60,234
def decrypt(self, encrypted):
    """Decrypt *encrypted* bytes with Fernet using decryption_cipher_key."""
    return Fernet(self.decryption_cipher_key).decrypt(encrypted)
decrypts the encrypted message using Fernet
60,235
def create_subject(self, authc_token=None, account_id=None, existing_subject=None, subject_context=None):
    """Creates a Subject instance for the user represented by the given
    method arguments.

    When no subject_context is supplied, a new one is built and marked
    authenticated with the supplied token/account. The context is then
    enriched (security manager, session, identifiers — order matters),
    the subject is constructed, saved, and returned.
    """
    if subject_context is None:
        context = self.create_subject_context(existing_subject)
        context.authenticated = True
        context.authentication_token = authc_token
        context.account_id = account_id
        if (existing_subject):
            context.subject = existing_subject
    else:
        # Work on a shallow copy so the caller's context isn't mutated.
        context = copy.copy(subject_context)
    # Order matters: security manager, then session, then identifiers.
    context = self.ensure_security_manager(context)
    context = self.resolve_session(context)
    context = self.resolve_identifiers(context)
    subject = self.do_create_subject(context)
    # Persist subject state (e.g. to its session) before returning.
    self.save(subject)
    return subject
Creates a Subject instance for the user represented by the given method arguments .
60,236
def login(self, subject, authc_token):
    """Authenticate *authc_token* and, on success, construct and bind a
    Subject representing the authenticated account.

    :raises AdditionalAuthenticationRequired: when MFA is still needed
        (carries the account_id on the exception instance).
    :raises AuthenticationException: on failed authentication (after
        running the on_failed_login hook).
    """
    try:
        account_id = self.authenticator.authenticate_account(subject.identifiers, authc_token)
    except AdditionalAuthenticationRequired as exc:
        self.update_subject_identity(exc.account_id, subject)
        # BUGFIX: re-raise the caught exception instance; raising the bare
        # class discarded exc.account_id, which callers need for MFA.
        raise
    except AuthenticationException as authc_ex:
        try:
            self.on_failed_login(authc_token, authc_ex, subject)
        except Exception:
            msg = ("on_failed_login method raised an exception. Logging "
                   "and propagating original AuthenticationException.")
            logger.info(msg, exc_info=True)
        raise
    logged_in = self.create_subject(authc_token=authc_token, account_id=account_id, existing_subject=subject)
    self.on_successful_login(authc_token, account_id, logged_in)
    return logged_in
Login authenticates a user using an AuthenticationToken . If authentication is successful AND the Authenticator has determined that authentication is complete for the account login constructs a Subject instance representing the authenticated account s identity . Once a subject instance is constructed it is bound to the application for subsequent access before being returned to the caller .
60,237
def ensure_security_manager(self, subject_context):
    """Ensure the context carries a security_manager, adding a reference
    to self when none can be resolved, so do_create_subject has one
    available during Subject construction."""
    if (subject_context.resolve_security_manager() is not None):
        logger.debug("Subject Context resolved a security_manager "
                     "instance, so not re-assigning. Returning.")
        return subject_context
    logger.debug("No security_manager found in context. Adding self "
                 "reference.")
    subject_context.security_manager = self
    return subject_context
Determines whether there is a SecurityManager instance in the context and if not adds self to the context . This ensures that do_create_subject will have access to a SecurityManager during Subject construction .
60,238
def resolve_session(self, subject_context):
    """Attach any resolvable session to the context; an invalid session is
    ignored, yielding an anonymous (session-less) subject."""
    if (subject_context.resolve_session() is not None):
        logger.debug("Context already contains a session. Returning.")
        return subject_context
    try:
        subject_context.session = self.resolve_context_session(subject_context)
    except InvalidSessionException:
        logger.debug("Resolved subject_subject_context context session is "
                     "invalid. Ignoring and creating an anonymous "
                     "(session-less) Subject instance.", exc_info=True)
    return subject_context
This method attempts to resolve any associated session based on the context and returns a context that represents this resolved Session to ensure it may be referenced if needed by the invoked do_create_subject that performs actual Subject construction .
60,239
def resolve_identifiers(self, subject_context):
    """Ensure the context has identifiers, falling back to a remembered
    identity when the context itself cannot resolve any."""
    identifiers = subject_context.resolve_identifiers(subject_context.session)
    if (not identifiers):
        logger.debug("No identity (identifier_collection) found in the "
                     "subject_context. Looking for a remembered identity.")
        identifiers = self.get_remembered_identity(subject_context)
        if identifiers:
            logger.debug("Found remembered IdentifierCollection. Adding to the "
                         "context to be used for subject construction.")
            subject_context.identifiers = identifiers
            subject_context.remembered = True
        else:
            logger.debug("No remembered identity found. Returning original "
                         "context.")
    return subject_context
ensures that a subject_context has identifiers and if it doesn t will attempt to locate them using heuristics
60,240
def logout(self, subject):
    """Logs out the specified Subject from the system.

    Fires before_logout, removes the subject binding, then stops the
    subject's session. Cleanup failures are logged and swallowed so
    logout always completes.

    :raises ValueError: if subject is None
    """
    if (subject is None):
        msg = "Subject argument cannot be None."
        raise ValueError(msg)
    self.before_logout(subject)
    # Copy identifiers up front so they can still be logged after deletion.
    identifiers = copy.copy(subject.identifiers)
    if (identifiers):
        msg = ("Logging out subject with primary identifier {0}".format(identifiers.primary_identifier))
        logger.debug(msg)
    try:
        self.delete(subject)
    except Exception:
        msg = "Unable to cleanly unbind Subject. Ignoring (logging out)."
        logger.debug(msg, exc_info=True)
    finally:
        # Session must be stopped even if unbinding failed.
        try:
            self.stop_session(subject)
        except Exception:
            msg2 = ("Unable to cleanly stop Session for Subject. "
                    "Ignoring (logging out).")
            logger.debug(msg2, exc_info=True)
Logs out the specified Subject from the system .
60,241
def is_permitted(self, identifiers, permission_s):
    """Generator yielding a (required_permission, is_permitted) tuple for
    each requested permission.

    If the authorization info cannot be obtained from the accountstore,
    the permission check tuple yields False.
    """
    identifier = identifiers.primary_identifier
    for required in permission_s:
        domain = Permission.get_domain(required)
        assigned = self.get_authzd_permissions(identifier, domain)
        is_permitted = False
        # NOTE(review): only the verdict from the LAST perms_blob survives
        # this loop -- an earlier True is overwritten by a later False.
        # Confirm whether an any()/OR fold was intended here.
        for perms_blob in assigned:
            is_permitted = self.permission_verifier.is_permitted_from_json(required, perms_blob)
        yield (required, is_permitted)
If the authorization info cannot be obtained from the accountstore permission check tuple yields False .
60,242
def has_role(self, identifiers, required_role_s):
    """Yield a (role, is_member) tuple for each required role.

    When no roles are obtainable from the account store, every required
    role yields False (and a warning is logged).
    """
    identifier = identifiers.primary_identifier
    assigned_role_s = self.get_authzd_roles(identifier)
    if not assigned_role_s:
        logger.warning('has_role: no roles obtained from account_store for [{0}]'.format(identifier))
        for role in required_role_s:
            yield (role, False)
        return
    for role in required_role_s:
        yield (role, {role} <= assigned_role_s)
Confirms whether a subject is a member of one or more roles .
60,243
def on_start(self, session, session_context):
    """Store the new session's ID (usually as a cookie) so it can be
    associated with future requests; skip when the cookie is disabled."""
    session_id = session.session_id
    web_registry = session_context['web_registry']
    if not self.is_session_id_cookie_enabled:
        logger.debug("Session ID cookie is disabled. No cookie has been set for "
                     "new session with id: " + str(session_id))
        return
    web_registry.session_id = session_id
    logger.debug("Set SessionID cookie using id: " + str(session_id))
Stores the Session s ID usually as a Cookie to associate with future requests .
60,244
def is_session_storage_enabled(self, subject=None):
    """Returns True if session storage is available for *subject*: either
    the subject already has a session, or the global setting is enabled
    and no request-specific override turns it off.

    NOTE(review): the ``subject=None`` default is unusable -- the first
    statement dereferences ``subject`` and would raise AttributeError for
    None. Callers apparently always pass a subject; confirm before
    relying on the default.
    """
    if subject.get_session(False):
        return True
    if not self.session_storage_enabled:
        return False
    # Non-web subjects under a non-native session manager cannot store.
    if (not hasattr(subject, 'web_registry') and self.session_manager and not isinstance(self.session_manager, session_abcs.NativeSessionManager)):
        return False
    # Request-specific override lives on the web registry.
    return subject.web_registry.session_creation_enabled
Returns True if session storage is generally available (as determined by the superclass's global configuration property is_session_storage_enabled) and no request-specific override has turned off session storage; False otherwise.
60,245
def default_marshaller(obj):
    """Return the serializable state of *obj*: ``__getstate__()`` when
    implemented, otherwise its ``__dict__``.

    :raises TypeError: if neither mechanism is available.
    """
    getstate = getattr(obj, '__getstate__', None)
    if getstate is not None:
        return getstate()
    try:
        return obj.__dict__
    except AttributeError:
        raise TypeError('{!r} has no __dict__ attribute and does not implement __getstate__()'
                        .format(obj.__class__.__name__))
Retrieve the state of the given object .
60,246
def default_unmarshaller(instance, state):
    """Restore *state* onto *instance* via ``__setstate__`` when available,
    otherwise by updating ``__dict__``.

    :raises TypeError: if neither mechanism is available.
    """
    setstate = getattr(instance, '__setstate__', None)
    if setstate is not None:
        setstate(state)
        return
    try:
        instance.__dict__.update(state)
    except AttributeError:
        raise TypeError('{!r} has no __dict__ attribute and does not implement __setstate__()'
                        .format(instance.__class__.__name__))
Restore the state of an object .
60,247
def do_authenticate_account(self, authc_token):
    """Returns an account object only when the current token authenticates
    AND the authentication process is complete, raising otherwise.

    :raises KeyError: for an unsupported token type.
    :raises AdditionalAuthenticationRequired: when more credential tiers
        (e.g. TOTP) remain to be satisfied.
    """
    try:
        realms = self.token_realm_resolver[authc_token.__class__]
    except KeyError:
        raise KeyError('Unsupported Token Type Provided: ', authc_token.__class__.__name__)
    # NOTE(review): branches on len(self.realms) but indexes the locally
    # resolved `realms` -- confirm self.realms vs realms is intentional.
    if (len(self.realms) == 1):
        account = self.authenticate_single_realm_account(realms[0], authc_token)
    else:
        account = self.authenticate_multi_realm_account(self.realms, authc_token)
    # Lockout check against failed attempts for this credential type.
    cred_type = authc_token.token_info['cred_type']
    attempts = account['authc_info'][cred_type].get('failed_attempts', [])
    self.validate_locked(authc_token, attempts)
    # More credential tiers than the token's tier => MFA still required.
    if len(account['authc_info']) > authc_token.token_info['tier']:
        if self.mfa_dispatcher:
            # Dispatch a TOTP challenge to the user out-of-band.
            realm = self.token_realm_resolver[TOTPToken][0]
            totp_token = realm.generate_totp_token(account)
            mfa_info = account['authc_info']['totp_key']['2fa_info']
            self.mfa_dispatcher.dispatch(authc_token.identifier, mfa_info, totp_token)
        raise AdditionalAuthenticationRequired(account['account_id'])
    return account
Returns an account object only when the current token authenticates AND the authentication process is complete raising otherwise
60,248
def extra_from_record(self, record):
    """Return the ``extra`` dict originally passed to the logger call,
    i.e. every record attribute that is not a logging builtin."""
    extras = {}
    for attr_name, value in record.__dict__.items():
        if attr_name not in BUILTIN_ATTRS:
            extras[attr_name] = value
    return extras
Returns extra dict you passed to logger .
60,249
def save(self, subject):
    """Merge the subject's identity into its session when session storage
    is enabled for that subject; otherwise only log and do nothing."""
    if (self.is_session_storage_enabled(subject)):
        self.merge_identity(subject)
    else:
        logger.debug(
            "Session storage of subject state for Subject [{0}] has "
            "been disabled: identity and authentication state are "
            "expected to be initialized on every request or "
            "invocation.".format(subject))
    return subject
Saves the subject s state to the subject s Session only if session storage is enabled for the subject . If session storage is not enabled for the specific Subject this method does nothing .
60,250
def create_manager(self, yosai, settings, session_attributes):
    """Instantiate and wire the security manager.

    Order of execution matters: the session attributes must be registered
    with the SerializationManager before the cache handler is
    instantiated, so that the cache handler's serialization manager
    already knows about them.
    """
    mgr_settings = SecurityManagerSettings(settings)
    attributes = mgr_settings.attributes
    realms = self._init_realms(settings, attributes['realms'])
    session_attributes = self._init_session_attributes(session_attributes, attributes)
    serialization_manager = SerializationManager(session_attributes, serializer_scheme=attributes['serializer'])
    # Must come after the serialization manager is built (see docstring).
    cache_handler = self._init_cache_handler(settings, attributes['cache_handler'], serialization_manager)
    manager = mgr_settings.security_manager(yosai, settings, realms=realms, cache_handler=cache_handler, serialization_manager=serialization_manager)
    return manager
Order of execution matters . The sac must be set before the cache_handler is instantiated so that the cache_handler s serialization manager instance registers the sac .
60,251
def check_permission(self, identifiers, permission_s, logical_operator):
    """Halt authorization by raising when the collective permission check
    under *logical_operator* fails.

    :raises UnauthorizedException: when the subject lacks the permissions.
    """
    self.assert_realms_configured()
    permitted = self.is_permitted_collective(identifiers, permission_s, logical_operator)
    if not permitted:
        raise UnauthorizedException("Subject lacks permission(s) to satisfy logical operation")
like Yosai s authentication process the authorization process will raise an Exception to halt further authz checking once Yosai determines that a Subject is unauthorized to receive the requested permission
60,252
def by_type(self, identifier_class):
    """Return the set of unique identifiers that are instances of
    *identifier_class*.

    Replaces a manual accumulate-then-rewrap loop (the result was already
    a set; the final ``set(...)`` wrap was redundant) with a single set
    comprehension.
    """
    return {identifier
            for identifier in self.source_identifiers.values()
            if isinstance(identifier, identifier_class)}
returns all unique instances of a type of identifier
60,253
def create(self, session):
    """Create the session via the parent class, then cache it and record
    the association between the cached session and the subject.

    :returns: the new session id
    """
    sessionid = super().create(session)
    self._cache(session, sessionid)
    return sessionid
caches the session and caches an entry to associate the cached session with the subject
60,254
def start(self, session_context):
    """Create and start a new session, emit SESSION.START, and return an
    exposed session. Timeout handling is deferred to SimpleSession (via
    session_settings) rather than applied here."""
    session = self._create_session(session_context)
    self.session_handler.on_start(session, session_context)
    self.notify_event(session_tuple(None, session.session_id), 'SESSION.START')
    return self.create_exposed_session(session=session, context=session_context)
unlike shiro yosai does not apply session timeouts from within the start method of the SessionManager but rather defers timeout settings responsibilities to the SimpleSession which uses session_settings
60,255
def _setup(self, name=None):
    """Load settings from the file referenced by the ``env_var``
    environment variable, falling back to the configured ``file_path``.

    :param name: unused; kept for interface compatibility.
    :raises OSError: when no settings file path can be determined.
    """
    envvar = self.__dict__['env_var']
    if envvar:
        settings_file = os.environ.get(envvar)
    else:
        settings_file = self.__dict__['file_path']
    if not settings_file:
        # BUGFIX: the two concatenated literals previously ran together
        # as "envvar.Since" -- a separating space was missing.
        msg = ("Requested settings, but none can be obtained for the envvar. "
               "Since no config filepath can be obtained, a default config "
               "will be used.")
        logger.error(msg)
        raise OSError(msg)
    self._wrapped = Settings(settings_file)
Load the settings module referenced by env_var . This environment - defined configuration process is called during the settings configuration process .
60,256
def remember_encrypted_identity(self, subject, encrypted):
    """Base64-encode *encrypted* and store it as the subject's RememberMe
    cookie value; subjects that are not HTTP-aware (no web_registry) are
    skipped with a debug log."""
    try:
        cookie_value = base64.b64encode(encrypted).decode('utf-8')
        subject.web_registry.remember_me = cookie_value
    except AttributeError:
        msg = ("Subject argument is not an HTTP-aware instance. This "
               "is required to obtain a web registry in order to"
               "set the RememberMe cookie. Returning immediately "
               "and ignoring RememberMe operation.")
        logger.debug(msg)
Base64 - encodes the specified serialized byte array and sets that base64 - encoded String as the cookie value .
60,257
def get_remembered_encrypted_identity(self, subject_context):
    """Returns the previously stored encrypted identity as bytes, or None
    when no RememberMe cookie is available.
    """
    # Identity was explicitly removed: nothing to return. The extra debug
    # message covers the non-HTTP-aware case.
    if (self.is_identity_removed(subject_context)):
        if not isinstance(subject_context, web_subject_abcs.WebSubjectContext):
            msg = ("SubjectContext argument is not an HTTP-aware instance. "
                   "This is required to obtain a web registry "
                   "in order to retrieve the RememberMe cookie. Returning "
                   "immediately and ignoring rememberMe operation.")
            logger.debug(msg)
        return None
    remember_me = subject_context.web_registry.remember_me
    if remember_me:
        logger.debug("Acquired encoded identity [" + str(remember_me) + "]")
        # Cookie value was base64-encoded by remember_encrypted_identity.
        encrypted = base64.b64decode(remember_me)
        return encrypted
    else:
        return None
Returns a previously serialized identity byte array or None if the byte array could not be acquired .
60,258
def _get_settings_class():
    """Resolve the settings class configured via AUTH_ADFS['SETTINGS_CLASS'].

    :raises ImproperlyConfigured: when AUTH_ADFS is missing from the
        Django settings.
    """
    if not hasattr(django_settings, "AUTH_ADFS"):
        raise ImproperlyConfigured(
            "The configuration directive 'AUTH_ADFS' was not found in your Django settings")
    cls_path = django_settings.AUTH_ADFS.get('SETTINGS_CLASS', DEFAULT_SETTINGS_CLASS)
    return import_string(cls_path)
Get the AUTH_ADFS setting from the Django settings .
60,259
def build_authorization_endpoint(self, request, disable_sso=None):
    """Return the ADFS authorization URL to redirect the user to.

    The post-login redirect target is taken from the request's redirect
    field (falling back to LOGIN_REDIRECT_URL) and carried through ADFS
    in the OAuth2 ``state`` parameter, urlsafe-base64 encoded.
    """
    self.load_config()
    redirect_to = request.GET.get(REDIRECT_FIELD_NAME, None)
    if not redirect_to:
        redirect_to = django_settings.LOGIN_REDIRECT_URL
    # urlsafe base64 keeps the state parameter URL-clean
    redirect_to = base64.urlsafe_b64encode(redirect_to.encode()).decode()
    query = QueryDict(mutable=True)
    query.update({
        "response_type": "code",
        "client_id": settings.CLIENT_ID,
        "resource": settings.RELYING_PARTY_ID,
        "redirect_uri": self.redirect_uri(request),
        "state": redirect_to,
    })
    if self._mode == "openid_connect":
        query["scope"] = "openid"
    # prompt=login forces re-authentication (disables SSO)
    if (disable_sso is None and settings.DISABLE_SSO) or disable_sso is True:
        query["prompt"] = "login"
    return "{0}?{1}".format(self.authorization_endpoint, query.urlencode())
This function returns the ADFS authorization URL .
60,260
def create_user(self, claims):
    """Get or create the Django user matching the configured username
    claim; freshly created (or password-less) users get an unusable
    password."""
    username = claims[settings.USERNAME_CLAIM]
    usermodel = get_user_model()
    lookup = {usermodel.USERNAME_FIELD: username}
    user, created = usermodel.objects.get_or_create(**lookup)
    if created or not user.password:
        user.set_unusable_password()
        logger.debug("User '{}' has been created.".format(username))
    return user
Create the user if it doesn t exist yet
60,261
def update_user_attributes(self, user, claims):
    """Updates user attributes based on the CLAIM_MAPPING setting.

    A missing claim for a required (non-blank) field raises; optional
    fields are left empty with a warning.

    :raises ImproperlyConfigured: on a missing required claim or an
        unknown user-model field.
    """
    # Fields the model declares as non-blank are treated as required.
    required_fields = [field.name for field in user._meta.fields if field.blank is False]
    for field, claim in settings.CLAIM_MAPPING.items():
        if hasattr(user, field):
            if claim in claims:
                setattr(user, field, claims[claim])
                logger.debug("Attribute '{}' for user '{}' was set to '{}'.".format(field, user, claims[claim]))
            else:
                if field in required_fields:
                    msg = "Claim not found in access token: '{}'. Check ADFS claims mapping."
                    raise ImproperlyConfigured(msg.format(claim))
                else:
                    msg = "Claim '{}' for user field '{}' was not found in the access token for user '{}'. " \
                          "Field is not required and will be left empty".format(claim, field, user)
                    logger.warning(msg)
        else:
            msg = "User model has no field named '{}'. Check ADFS claims mapping."
            raise ImproperlyConfigured(msg.format(field))
Updates user attributes based on the CLAIM_MAPPING setting .
60,262
def update_user_groups(self, user, claims):
    """Updates user group memberships based on the GROUPS_CLAIM setting.

    Groups present locally but absent from the claim are removed; claim
    groups not yet assigned are added (created first when MIRROR_GROUPS
    is on, otherwise silently skipped when they don't exist locally).
    """
    if settings.GROUPS_CLAIM is not None:
        django_groups = [group.name for group in user.groups.all()]
        if settings.GROUPS_CLAIM in claims:
            claim_groups = claims[settings.GROUPS_CLAIM]
            if not isinstance(claim_groups, list):
                # a single group arrives as a bare string
                claim_groups = [claim_groups, ]
        else:
            logger.debug("The configured groups claim '{}' was not found in the access token".format(settings.GROUPS_CLAIM))
            claim_groups = []
        # Diff local membership against the claim.
        groups_to_remove = set(django_groups) - set(claim_groups)
        groups_to_add = set(claim_groups) - set(django_groups)
        for group_name in groups_to_remove:
            group = Group.objects.get(name=group_name)
            user.groups.remove(group)
            logger.debug("User removed from group '{}'".format(group_name))
        for group_name in groups_to_add:
            try:
                if settings.MIRROR_GROUPS:
                    group, _ = Group.objects.get_or_create(name=group_name)
                    logger.debug("Created group '{}'".format(group_name))
                else:
                    group = Group.objects.get(name=group_name)
                user.groups.add(group)
                logger.debug("User added to group '{}'".format(group_name))
            except ObjectDoesNotExist:
                # group doesn't exist locally and mirroring is off: skip
                pass
Updates user group memberships based on the GROUPS_CLAIM setting .
60,263
def update_user_flags(self, user, claims):
    """Update boolean user attributes from group and boolean claims.

    ``GROUP_TO_FLAG_MAPPING`` sets a flag True iff the mapped group is in
    the groups claim; ``BOOLEAN_CLAIM_MAPPING`` sets a flag from a truthy
    claim value ('y', 'yes', 't', 'true', 'on', '1', case-insensitive).
    Raises ``ImproperlyConfigured`` for fields missing on the user model.

    Fix: the debug messages previously passed (user, flag/field) to
    ``format`` while the message reads "Attribute '{}' for user '{}'";
    the arguments now match the placeholders.
    """
    if settings.GROUPS_CLAIM is not None:
        if settings.GROUPS_CLAIM in claims:
            access_token_groups = claims[settings.GROUPS_CLAIM]
            # A single group arrives as a plain string instead of a list.
            if not isinstance(access_token_groups, list):
                access_token_groups = [access_token_groups, ]
        else:
            logger.debug("The configured group claim was not found in the access token")
            access_token_groups = []
        for flag, group in settings.GROUP_TO_FLAG_MAPPING.items():
            if hasattr(user, flag):
                value = group in access_token_groups
                setattr(user, flag, value)
                logger.debug("Attribute '{}' for user '{}' was set to '{}'.".format(flag, user, value))
            else:
                msg = "User model has no field named '{}'. Check ADFS boolean claims mapping."
                raise ImproperlyConfigured(msg.format(flag))
    for field, claim in settings.BOOLEAN_CLAIM_MAPPING.items():
        if hasattr(user, field):
            bool_val = False
            if claim in claims and str(claims[claim]).lower() in ['y', 'yes', 't', 'true', 'on', '1']:
                bool_val = True
            setattr(user, field, bool_val)
            logger.debug('Attribute "{}" for user "{}" was set to "{}".'.format(field, user, bool_val))
        else:
            msg = "User model has no field named '{}'. Check ADFS boolean claims mapping."
            raise ImproperlyConfigured(msg.format(field))
Updates user boolean attributes based on the BOOLEAN_CLAIM_MAPPING setting .
60,264
def get(self, request):
    """Handle the redirect from ADFS back to our site.

    Exchanges the ``code`` query parameter for a user via the
    authentication backend, logs the user in, and redirects to the URL
    carried (urlsafe-base64 encoded) in ``state``, falling back to
    ``LOGIN_REDIRECT_URL``. Renders an error page on any failure.
    """
    code = request.GET.get("code")
    if not code:
        # ADFS did not send an authorization code: nothing to exchange.
        return render(request, 'django_auth_adfs/login_failed.html', {
            'error_message': "No authorization code was provided.",
        }, status=400)
    redirect_to = request.GET.get("state")
    user = authenticate(request=request, authorization_code=code)
    if user is not None:
        if user.is_active:
            login(request, user)
            if redirect_to:
                # ``state`` carries the original URL, urlsafe-base64 encoded.
                redirect_to = base64.urlsafe_b64decode(redirect_to.encode()).decode()
            else:
                redirect_to = django_settings.LOGIN_REDIRECT_URL
            # Open-redirect guard: only allow same-host targets.
            url_is_safe = is_safe_url(
                url=redirect_to,
                allowed_hosts=[request.get_host()],
                require_https=request.is_secure(),
            )
            redirect_to = redirect_to if url_is_safe else '/'
            return redirect(redirect_to)
        else:
            return render(request, 'django_auth_adfs/login_failed.html', {
                'error_message': "Your account is disabled.",
            }, status=403)
    else:
        return render(request, 'django_auth_adfs/login_failed.html', {
            'error_message': "Login failed.",
        }, status=401)
Handles the redirect from ADFS to our site . We try to process the passed authorization code and login the user .
60,265
def authenticate(self, request):
    """Return ``(user, token)`` when a valid ``Bearer`` access token is
    supplied in the Authorization header; return None when the header is
    absent or uses a different scheme (so other authenticators can run).

    Raises ``AuthenticationFailed`` for malformed headers, invalid
    tokens, or inactive users.
    """
    auth = get_authorization_header(request).split()
    if not auth or auth[0].lower() != b'bearer':
        # Not a bearer token: defer to other authentication classes.
        return None
    if len(auth) == 1:
        msg = 'Invalid authorization header. No credentials provided.'
        raise exceptions.AuthenticationFailed(msg)
    elif len(auth) > 2:
        msg = 'Invalid authorization header. Access token should not contain spaces.'
        raise exceptions.AuthenticationFailed(msg)
    # Calls the module-level authenticate() (auth backend dispatch),
    # not this method.
    user = authenticate(access_token=auth[1])
    if user is None:
        raise exceptions.AuthenticationFailed('Invalid access token.')
    if not user.is_active:
        raise exceptions.AuthenticationFailed('User inactive or deleted.')
    return user, auth[1]
Returns a User if a correct access token has been supplied in the Authorization header . Otherwise returns None .
60,266
def molecular_orbital(coords, mocoeffs, gbasis):
    """Build a callable molecular orbital ``f(x, y, z)`` from nuclear
    coordinates, MO coefficients and a cclib-style basis specification.

    Coordinates are scaled by 10 on the way in (unit conversion).
    """
    def orbital(x, y, z, coords=coords, mocoeffs=mocoeffs, gbasis=gbasis):
        total = 0
        for coefficient, basis_function in zip(mocoeffs, getbfs(coords * 10, gbasis)):
            total = total + coefficient * basis_function(x * 10, y * 10, z * 10)
        return total
    return orbital
Return a molecular orbital given the nuclei coordinates as well as molecular orbital coefficients and basis set specification as given by the cclib library .
60,267
def getbfs(coords, gbasis):
    """Build the list of contracted Gaussian basis functions for each
    atom, expanding each shell symbol into its Cartesian power triples.

    Convenience function modelled after PyQuante's Ints.py.
    """
    shell_powers = {
        'S': [(0, 0, 0)],
        'P': [(1, 0, 0), (0, 1, 0), (0, 0, 1)],
        'D': [(2, 0, 0), (0, 2, 0), (0, 0, 2), (1, 1, 0), (0, 1, 1), (1, 0, 1)],
        'F': [(3, 0, 0), (2, 1, 0), (2, 0, 1), (1, 2, 0), (1, 1, 1), (1, 0, 2),
              (0, 3, 0), (0, 2, 1), (0, 1, 2), (0, 0, 3)],
    }
    basis_functions = []
    for atom_index, atom_position in enumerate(coords):
        # gbasis[i] is the list of (shell symbol, primitives) for atom i.
        for symbol, primitives in gbasis[atom_index]:
            for powers in shell_powers[symbol]:
                contracted = cgbf(atom_position, powers)
                for exponent, coefficient in primitives:
                    contracted.add_pgbf(exponent, coefficient)
                contracted.normalize()
                basis_functions.append(contracted)
    return basis_functions
Convenience function for both wavefunction and density based on PyQuante Ints . py .
60,268
def A_term(i, r, u, l1, l2, PAx, PBx, CPx, gamma):
    """Single term of the A array for nuclear attraction integrals.
    THO eq. 2.18.
    """
    sign = pow(-1, i) * pow(-1, u)
    numerator = (sign
                 * binomial_prefactor(i, l1, l2, PAx, PBx)
                 * factorial(i)
                 * pow(CPx, i - 2 * r - 2 * u)
                 * pow(0.25 / gamma, r + u))
    return numerator / factorial(r) / factorial(u) / factorial(i - 2 * r - 2 * u)
THO eq . 2 . 18
60,269
def A_array(l1, l2, PA, PB, CP, g):
    """Assemble the full A array (THO eqs. 2.18 and 3.1) by accumulating
    A_term contributions into index ``I = i - 2*r - u``.
    """
    Imax = l1 + l2 + 1
    terms = [0] * Imax
    for i in range(Imax):
        for r in range(int(floor(i / 2) + 1)):
            for u in range(int(floor((i - 2 * r) / 2) + 1)):
                index = i - 2 * r - u
                terms[index] += A_term(i, r, u, l1, l2, PA, PB, CP, g)
    return terms
THO eq . 2 . 18 and 3 . 1
60,270
def _normalize(self):
    "Normalize basis function. From THO eq. 2.2"
    l, m, n = self.powers
    # Normalization constant for a primitive Cartesian Gaussian with
    # angular momentum powers (l, m, n) and this exponent (THO eq. 2.2).
    # fact2 is the double factorial (project helper).
    self.norm = np.sqrt(pow(2, 2 * (l + m + n) + 1.5) *
                        pow(self.exponent, l + m + n + 1.5) /
                        fact2(2 * l - 1) / fact2(2 * m - 1) /
                        fact2(2 * n - 1) / pow(np.pi, 1.5))
    return
Normalize basis function . From THO eq . 2 . 2
60,271
def hide(self, selections):
    """Hide objects in this representation.

    *selections* may contain 'atoms', 'bonds' and 'box' entries; the
    hidden state is stored and the matching change handlers fired.
    Returns the updated hidden_state dict.
    """
    if 'atoms' in selections:
        self.hidden_state['atoms'] = selections['atoms']
        self.on_atom_hidden_changed()
    if 'bonds' in selections:
        self.hidden_state['bonds'] = selections['bonds']
        self.on_bond_hidden_changed()
    if 'box' in selections:
        self.hidden_state['box'] = box_s = selections['box']
        if box_s.mask[0]:
            # Box hidden: drop its renderer if it is currently attached.
            if self.viewer.has_renderer(self.box_renderer):
                self.viewer.remove_renderer(self.box_renderer)
        else:
            # Box visible again: re-attach the renderer if missing.
            if not self.viewer.has_renderer(self.box_renderer):
                self.viewer.add_renderer(self.box_renderer)
    return self.hidden_state
Hide objects in this representation . BallAndStickRepresentation support selections of atoms and bonds .
60,272
def change_radius(self, selections, value):
    """Change the radius of the selected atoms.

    When *value* is None the radii are reset to the van der Waals radius
    of each atom type scaled by 0.3; otherwise every selected atom gets
    *value*.
    """
    if 'atoms' in selections:
        atms = selections['atoms'].mask
        if value is None:
            # Reset to default: vdW radius scaled by 0.3.
            self.radii_state.array[atms] = [vdw_radii.get(t) * 0.3 for t in self.system.type_array[atms]]
        else:
            self.radii_state.array[atms] = value
    self.update_scale_factors(self.scale_factors)
Change the radius of each atom by a certain value
60,273
def paintGL(self):
    """GL function called each time a frame is drawn."""
    if self.post_processing:
        # Render the scene into the offscreen framebuffer so the
        # post-processing effects can sample it as a texture.
        glBindFramebuffer(GL_FRAMEBUFFER, self.fb0)
        glViewport(0, 0, self.width(), self.height())
        status = glCheckFramebufferStatus(GL_FRAMEBUFFER)
        if (status != GL_FRAMEBUFFER_COMPLETE):
            # NOTE(review): dict(GL_FRAMEBUFFER_UNDEFINED=...) keys are
            # the literal *strings*, while status is the numeric GL enum;
            # indexing by status likely raises KeyError -- verify.
            reason = dict(GL_FRAMEBUFFER_UNDEFINED='UNDEFINED',
                          GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT='INCOMPLETE_ATTACHMENT',
                          GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT='INCOMPLETE_MISSING_ATTACHMENT',
                          GL_FRAMEBUFFER_INCOMPLETE_DRAW_BUFFER='INCOMPLETE_DRAW_BUFFER',
                          GL_FRAMEBUFFER_INCOMPLETE_READ_BUFFER='INCOMPLETE_READ_BUFFER',
                          GL_FRAMEBUFFER_UNSUPPORTED='UNSUPPORTED',
                          )[status]
            raise Exception('Framebuffer is not complete: {}'.format(reason))
    else:
        glBindFramebuffer(GL_FRAMEBUFFER, DEFAULT_FRAMEBUFFER)
    # Clear with the configured background color (0-255 -> 0.0-1.0).
    bg_r, bg_g, bg_b, bg_a = self.background_color
    glClearColor(bg_r / 255, bg_g / 255, bg_b / 255, bg_a / 255)
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
    proj = self.camera.projection
    cam = self.camera.matrix
    self.mvproj = np.dot(proj, cam)
    # Light direction expressed in camera space.
    self.ldir = cam[:3, :3].T.dot(self.light_dir)
    self.on_draw_world()
    if self.post_processing:
        if len(self.post_processing) > 1:
            # Chain the effects, ping-ponging between two framebuffers;
            # the last effect renders to the default framebuffer.
            newarg = self.textures.copy()
            for i, pp in enumerate(self.post_processing[:-1]):
                if i % 2:
                    outfb = self.fb1
                    outtex = self._extra_textures['fb1']
                else:
                    outfb = self.fb2
                    outtex = self._extra_textures['fb2']
                pp.render(outfb, newarg)
                newarg['color'] = outtex
            self.post_processing[-1].render(DEFAULT_FRAMEBUFFER, newarg)
        else:
            self.post_processing[0].render(DEFAULT_FRAMEBUFFER, self.textures)
    self.on_draw_ui()
GL function called each time a frame is drawn
60,274
def toimage(self, width=None, height=None):
    """Return the current scene as a PIL Image.

    If both *width* and *height* are given, the viewport is resized
    before rendering.
    """
    from .postprocessing import NoEffect
    # NoEffect captures the rendered frame into a texture without
    # changing the image.
    effect = NoEffect(self)
    self.post_processing.append(effect)
    oldwidth, oldheight = self.width(), self.height()
    if None not in (width, height):
        self.resize(width, height)
        self.resizeGL(width, height)
    else:
        width = self.width()
        height = self.height()
    self.paintGL()
    self.post_processing.remove(effect)
    # Read the captured texture back from the GPU.
    coltex = effect.texture
    coltex.bind()
    glActiveTexture(GL_TEXTURE0)
    data = glGetTexImage(GL_TEXTURE_2D, 0, GL_RGBA, GL_UNSIGNED_BYTE)
    # The 'raw' decoder with stride -1 flips the image vertically
    # (GL's origin is bottom-left, PIL's is top-left).
    image = pil_Image.frombuffer('RGBA', (width, height), data, 'raw', 'RGBA', 0, -1)
    # NOTE(review): oldwidth/oldheight are captured but the widget is
    # never resized back -- confirm whether that is intended.
    return image
Return the current scene as a PIL Image .
60,275
def _real(coords1, charges1, coords2, charges2, rcut, alpha, box):
    """Calculate the Ewald real-space part.

    The box has to be cuboidal; transform any other box shape to a
    cuboidal box before using this. Returns, per particle in *coords1*,
    the screened Coulomb sum over *coords2* and their periodic images
    within *rcut*.
    """
    n = coords1.shape[0]
    m = coords2.shape[0]
    # Box edge vectors (rows of the box matrix).
    a = box[0]
    b = box[1]
    c = box[2]
    # Number of periodic images to scan in each direction.
    # NOTE(review): np.trace(box) is the *sum* of the diagonal; the
    # minimum box length (np.min(np.diag(box))) looks more likely
    # intended here -- confirm before changing.
    l_max = int(np.ceil(2.0 * rcut / np.min(np.trace(box))))
    result = np.zeros(n)
    for i in range(n):
        q_i = charges1[i]
        r_i = coords1[i]
        for j in range(m):
            q_j = charges2[j]
            r_j = coords2[j]
            for l_i in range(-l_max, l_max + 1):
                for l_j in range(-l_max, l_max + 1):
                    for l_k in range(-l_max, l_max + 1):
                        # Image shift vector for this lattice translation.
                        nv = l_i * a + l_j * b + l_k * c
                        r_j_n = r_j + nv
                        r_ij = _dist(r_i, r_j_n)
                        # Skip self-interaction (zero distance) and pairs
                        # beyond the real-space cutoff.
                        if r_ij < 1e-10 or r_ij > rcut:
                            continue
                        value = q_i * q_j * math.erfc(alpha * r_ij) / r_ij
                        result[i] += value
    return result
Calculate the Ewald real - space part . The box has to be cuboidal ; transform any other box shape to a cuboidal box before using this .
60,276
def _reciprocal(coords1, charges1, coords2, charges2, kmax, kappa, box):
    """Calculate the Ewald reciprocal-space part.

    The box has to be cuboidal; transform any other box shape to a
    cuboidal box before using this. Returns the per-particle reciprocal
    energy minus the self-energy correction.
    """
    n = coords1.shape[0]
    m = coords2.shape[0]
    result = np.zeros(n, dtype=np.float64)
    # Particles of coords1 that coincide with one of coords2 need the
    # self-energy correction subtracted at the end.
    need_self = np.zeros(n, dtype=np.uint8)
    g1, g2, g3 = reciprocal_vectors(box)
    V = box_volume(box)
    prefac = 1.0 / (np.pi * V)
    for i in range(n):
        q_i = charges1[i]
        r_i = coords1[i]
        for j in range(m):
            q_j = charges2[j]
            r_j = coords2[j]
            r_ij = _dist(r_i, r_j)
            if r_ij < 1e-10:
                need_self[i] = 1
            for k_i in range(-kmax, kmax + 1):
                for k_j in range(-kmax, kmax + 1):
                    for k_k in range(-kmax, kmax + 1):
                        # Skip the singular k = 0 term.
                        if k_i == 0 and k_j == 0 and k_k == 0:
                            continue
                        k = k_i * g1 + k_j * g2 + k_k * g3
                        k_sq = sqsum(k)
                        result[i] += (prefac * q_i * q_j * 4.0 * np.pi ** 2 / k_sq
                                      * math.exp(-k_sq / (4.0 * kappa ** 2))
                                      * math.cos(np.dot(k, r_i - r_j)))
    # Point self-energy term, applied only to coinciding particles.
    self_energy = 2 * (need_self * kappa * charges1 ** 2) / (np.pi ** 0.5)
    return result - self_energy
Calculate the Ewald reciprocal - space part . The box has to be cuboidal ; transform any other box shape to a cuboidal box before using this .
60,277
def update_bounds(self, bounds):
    """Update cylinder start and end positions.

    *bounds* has shape (n, 2, 3): start and end point for each cylinder.
    """
    starts = bounds[:, 0, :]
    ends = bounds[:, 1, :]
    self.bounds = bounds
    # Per-cylinder length, used by _process_reference to scale geometry.
    self.lengths = np.sqrt(((ends - starts) ** 2).sum(axis=1))
    vertices, normals, colors = self._process_reference()
    self.tr.update_vertices(vertices)
    self.tr.update_normals(normals)
    # NOTE(review): the recomputed colors are discarded -- confirm the
    # renderer deliberately keeps its previous color buffer.
Update cylinders start and end positions
60,278
def make_trajectory(first, filename, restart=False):
    """Factory to easily create a trajectory object.

    Appends to an existing file when *restart* is true, otherwise writes
    a new one.
    """
    return Trajectory(first, filename, 'a' if restart else 'w')
Factory function to easily create a trajectory object
60,279
def has_key(self, key):
    """Case insensitive test whether *key* exists."""
    return self._lowerOrReturn(key) in self.data
Case insensitive test whether key exists .
60,280
def update_positions(self, positions):
    """Update the sphere positions."""
    # Translate the reference (radius-scaled) sphere vertices to each
    # sphere's new center; copy first so the template stays untouched.
    sphs_verts = self.sphs_verts_radii.copy()
    sphs_verts += positions.reshape(self.n_spheres, 1, 3)
    self.tr.update_vertices(sphs_verts)
    self.poslist = positions
Update the sphere positions .
60,281
def isnamedtuple(obj):
    """Heuristic check if an object is a namedtuple: a tuple carrying the
    namedtuple API (``_fields`` attribute and a callable ``_asdict``).
    """
    if not isinstance(obj, tuple):
        return False
    return hasattr(obj, "_fields") and hasattr(obj, "_asdict") and callable(obj._asdict)
Heuristic check if an object is a namedtuple .
60,282
def running_coordination_number(coordinates_a, coordinates_b, periodic, binsize=0.002, cutoff=1.5):
    """Cumulative radial distribution function, also called the running
    coordination number.
    """
    distances, counts = rdf(coordinates_a, coordinates_b, periodic=periodic,
                            normalize=False, binsize=binsize, cutoff=cutoff)
    # Per-particle counts, accumulated over increasing distance.
    per_particle = counts.astype('float32') / len(coordinates_a)
    return distances, np.cumsum(per_particle)
This is the cumulative radial distribution function also called running coordination number
60,283
def update_colors(self, colors):
    """Update the colors"""
    # Colors are stored as 8-bit unsigned integers for the GPU buffer.
    colors = np.array(colors, dtype=np.uint8)
    self._vbo_c.set_data(colors)
    self._vbo_c.unbind()
Update the colors
60,284
def frames(skip=1):
    """Iterate over the trajectory frames, yielding each frame index.

    Useful in a for loop: every *skip*-th frame is shown in the viewer
    and the Qt event loop is pumped so the GUI stays responsive.
    """
    from PyQt4 import QtGui
    for i in range(0, viewer.traj_controls.max_index, skip):
        viewer.traj_controls.goto_frame(i)
        yield i
        # Let Qt repaint between frames.
        QtGui.qApp.processEvents()
Useful command to iterate on the trajectory frames . It can be used in a for loop .
60,285
def display_system(system, autozoom=True):
    """Display a ``chemlab.core.System`` instance on screen."""
    viewer.clear()
    viewer.add_representation(BallAndStickRepresentation, system)
    if autozoom:
        autozoom_()
    viewer.update()
    # Show a textual summary of the system in the message area.
    msg(str(system))
Display a ~chemlab . core . System instance at screen
60,286
def display_molecule(mol, autozoom=True):
    """Display a ``chemlab.core.Molecule`` instance in the viewer by
    wrapping it in a single-molecule System.

    Fix: the *autozoom* argument is now forwarded to display_system
    instead of being hard-coded to True.
    """
    s = System([mol])
    display_system(s, autozoom=autozoom)
Display a ~chemlab . core . Molecule instance in the viewer .
60,287
def load_molecule(name, format=None):
    """Read a ``chemlab.core.Molecule`` from a file and display it."""
    mol = datafile(name, format=format).read('molecule')
    display_system(System([mol]))
Read a ~chemlab . core . Molecule from a file .
60,288
def write_system(filename, format=None):
    """Write the system currently displayed to a file."""
    datafile(filename, format=format, mode='w').write('system', current_system())
Write the system currently displayed to a file .
60,289
def write_molecule(filename, format=None):
    """Write the system currently displayed to a file as a molecule."""
    # NOTE(review): this writes the whole current System under the
    # 'molecule' feature -- confirm datafile handles that conversion.
    datafile(filename, format=format, mode='w').write('molecule', current_system())
Write the system displayed in a file as a molecule .
60,290
def load_trajectory(name, skip=1, format=None):
    """Load a trajectory file into chemlab.

    Call this after loading a ``chemlab.core.System`` (e.g. via
    load_system or load_remote_system): it wires the trajectory controls
    so that moving the time slider updates positions and box vectors of
    the current system.
    """
    df = datafile(name, format=format)
    dt, coords = df.read('trajectory', skip=skip)
    boxes = df.read('boxes')
    viewer.current_traj = coords
    viewer.frame_times = dt
    viewer.traj_controls.set_ticks(len(dt))

    def update(index):
        f = coords[index]
        # Let registered frame processors transform the raw coordinates.
        for fp in _frame_processors:
            f = fp(coords, index)
        viewer.representation.update_positions(f)
        viewer.representation.update_box(boxes[index])
        current_system().r_array = f
        current_system().box_vectors = boxes[index]
        viewer.traj_controls.set_time(dt[index])
        viewer.update()

    viewer.traj_controls.show()
    viewer.traj_controls.frame_changed.connect(update)
Load a trajectory file into chemlab . You should call this command after you load a ~chemlab . core . System through load_system or load_remote_system .
60,291
def from_arrays(cls, **kwargs):
    """Initialize a System from its constituent arrays.

    It is the fastest way to initialize a System, well suited for
    reading one or more big Systems from data files.
    """
    if 'mol_indices' in kwargs:
        # NOTE(review): this *raises* DeprecationWarning as a hard error
        # rather than emitting it with warnings.warn -- confirm intended.
        raise DeprecationWarning('The mol_indices argument is deprecated, use maps instead. (See from_arrays docstring)')
    return super(System, cls).from_arrays(**kwargs)
Initialize a System from its constituent arrays . It is the fastest way to initialize a System well suited for reading one or more big System from data files .
60,292
def minimum_image(self):
    """Align the system according to the minimum image convention.

    Requires box_vectors to be set; wraps r_array in place using the
    module-level minimum_image helper and returns self.
    """
    if self.box_vectors is None:
        raise ValueError('No periodic vectors defined')
    else:
        # Calls the free function minimum_image (same name as this
        # method) with the box diagonal as the periodic lengths.
        self.r_array = minimum_image(self.r_array, self.box_vectors.diagonal())
    return self
Align the system according to the minimum image convention
60,293
def where(self, within_of=None, inplace=False, **kwargs):
    """Return masks of entities that meet the conditions.

    In addition to the base-class conditions, *within_of* = (threshold,
    reference) selects atoms within a periodic distance threshold of the
    reference atom index (or indices). Requires box_vectors when
    *within_of* is used.
    """
    masks = super(System, self).where(inplace=inplace, **kwargs)

    def index_to_mask(index, n):
        # Convert an index array into a boolean mask of length n.
        val = np.zeros(n, dtype='bool')
        val[index] = True
        return val

    def masks_and(dict1, dict2):
        # Intersect two mask dictionaries key by key.
        return {k: dict1[k] & index_to_mask(dict2[k], len(dict1[k])) for k in dict1}

    if within_of is not None:
        if self.box_vectors is None:
            raise Exception('Only periodic distance supported')
        thr, ref = within_of
        # Shape the reference coordinates so they broadcast against all
        # atom positions below.
        if isinstance(ref, int):
            a = self.r_array[ref][np.newaxis, np.newaxis, :]
        elif len(ref) == 1:
            a = self.r_array[ref][np.newaxis, :]
        else:
            a = self.r_array[ref][:, np.newaxis, :]
        b = self.r_array[np.newaxis, :, :]
        dist = periodic_distance(a, b, periodic=self.box_vectors.diagonal())
        # Atoms within threshold of *any* of the reference atoms.
        atoms = (dist <= thr).sum(axis=0, dtype='bool')
        m = self._propagate_dim(atoms, 'atom')
        masks = masks_and(masks, m)
    return masks
Return indices that met the conditions
60,294
def _gser ( a , x ) : "Series representation of Gamma. NumRec sect 6.1." ITMAX = 100 EPS = 3.e-7 gln = lgamma ( a ) assert ( x >= 0 ) , 'x < 0 in gser' if x == 0 : return 0 , gln ap = a delt = sum = 1. / a for i in range ( ITMAX ) : ap = ap + 1. delt = delt * x / ap sum = sum + delt if abs ( delt ) < abs ( sum ) * EPS : break else : print ( 'a too large, ITMAX too small in gser' ) gamser = sum * np . exp ( - x + a * np . log ( x ) - gln ) return gamser , gln
Series representation of Gamma . NumRec sect 6 . 1 .
60,295
def _gcf ( a , x ) : "Continued fraction representation of Gamma. NumRec sect 6.1" ITMAX = 100 EPS = 3.e-7 FPMIN = 1.e-30 gln = lgamma ( a ) b = x + 1. - a c = 1. / FPMIN d = 1. / b h = d for i in range ( 1 , ITMAX + 1 ) : an = - i * ( i - a ) b = b + 2. d = an * d + b if abs ( d ) < FPMIN : d = FPMIN c = b + an / c if abs ( c ) < FPMIN : c = FPMIN d = 1. / d delt = d * c h = h * delt if abs ( delt - 1. ) < EPS : break else : print ( 'a too large, ITMAX too small in gcf' ) gammcf = np . exp ( - x + a * np . log ( x ) - gln ) * h return gammcf , gln
Continued fraction representation of Gamma . NumRec sect 6 . 1
60,296
def dmat(c, nocc):
    """Form the density matrix from the first nocc orbitals of c:
    D = C_occ @ C_occ.T
    """
    occupied = c[:, :nocc]
    return np.dot(occupied, occupied.T)
Form the density matrix from the first nocc orbitals of c
60,297
def geigh(H, S):
    "Solve the generalized eigensystem Hc = ESc"
    # cholorth and simx are project helpers -- presumably Cholesky
    # orthogonalization of S and a similarity transform of H; confirm.
    A = cholorth(S)
    E, U = np.linalg.eigh(simx(H, A))
    # Back-transform the eigenvectors with A before returning.
    return E, np.dot(A, U)
Solve the generalized eigensystem Hc = ESc
60,298
def _check_periodic ( periodic ) : periodic = np . array ( periodic ) if len ( periodic . shape ) == 2 : assert periodic . shape [ 0 ] == periodic . shape [ 1 ] , 'periodic shoud be a square matrix or a flat array' return np . diag ( periodic ) elif len ( periodic . shape ) == 1 : return periodic else : raise ValueError ( "periodic argument can be either a 3x3 matrix or a shape 3 array." )
Validate periodic input
60,299
def count_neighbors(coordinates_a, coordinates_b, periodic, r):
    """Count the number of neighbors of each point within distance *r*.

    Returns 0 when nothing is found, a list of per-point counts when the
    result is nested, or a single count otherwise.

    Fix: ``collections.Iterable`` was removed in Python 3.10; use
    ``collections.abc.Iterable`` instead.
    """
    from collections.abc import Iterable
    indices = nearest_neighbors(coordinates_a, coordinates_b, periodic, r=r)[0]
    if len(indices) == 0:
        return 0
    if isinstance(indices[0], Iterable):
        # Nested result: one index list per query point.
        return [len(ix) for ix in indices]
    else:
        return len(indices)
Count the number of neighbors .