Dataset columns:

  Column                          Type      Values
  ------------------------------  --------  -------------------
  identifier                      string    lengths 1–155
  parameters                      string    lengths 2–6.09k
  docstring                       string    lengths 11–63.4k
  docstring_summary               string    lengths 0–63.4k
  function                        string    lengths 29–99.8k
  function_tokens                 sequence
  start_point                     sequence
  end_point                       sequence
  language                        string    1 distinct value
  docstring_language              string    lengths 2–7
  docstring_language_predictions  string    lengths 18–23
  is_langid_reliable              string    2 distinct values
Url.request_uri
(self)
Absolute path including the query string.
Absolute path including the query string.
def request_uri(self):
    """Absolute path including the query string."""
    uri = self.path or "/"

    if self.query is not None:
        uri += "?" + self.query

    return uri
[ "def", "request_uri", "(", "self", ")", ":", "uri", "=", "self", ".", "path", "or", "\"/\"", "if", "self", ".", "query", "is", "not", "None", ":", "uri", "+=", "\"?\"", "+", "self", ".", "query", "return", "uri" ]
[ 114, 4 ]
[ 121, 18 ]
python
en
['en', 'en', 'en']
True
Url.netloc
(self)
Network location including host and port
Network location including host and port
def netloc(self):
    """Network location including host and port"""
    if self.port:
        return "%s:%d" % (self.host, self.port)

    return self.host
[ "def", "netloc", "(", "self", ")", ":", "if", "self", ".", "port", ":", "return", "\"%s:%d\"", "%", "(", "self", ".", "host", ",", "self", ".", "port", ")", "return", "self", ".", "host" ]
[ 124, 4 ]
[ 128, 24 ]
python
en
['en', 'en', 'en']
True
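Both properties are easiest to see on a parsed URL. A minimal sketch, assuming urllib3 is installed and that parse_url is imported from urllib3.util.url (the module these properties come from); the URL itself is invented:

from urllib3.util.url import parse_url

u = parse_url("http://host.com:8080/mail/inbox?folder=1#top")
print(u.request_uri)  # '/mail/inbox?folder=1' (path plus query, no fragment)
print(u.netloc)       # 'host.com:8080' (host plus port when a port is set)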
Url.url
(self)
Convert self into a url This function should more or less round-trip with :func:`.parse_url`. The returned url may not be exactly the same as the url inputted to :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls with a blank port will have : removed). Example: :: >>> U = parse_url('http://google.com/mail/') >>> U.url 'http://google.com/mail/' >>> Url('http', 'username:password', 'host.com', 80, ... '/path', 'query', 'fragment').url 'http://username:password@host.com:80/path?query#fragment'
Convert self into a url
def url(self):
    """
    Convert self into a url

    This function should more or less round-trip with :func:`.parse_url`. The
    returned url may not be exactly the same as the url inputted to
    :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls
    with a blank port will have : removed).

    Example: ::

        >>> U = parse_url('http://google.com/mail/')
        >>> U.url
        'http://google.com/mail/'
        >>> Url('http', 'username:password', 'host.com', 80,
        ...     '/path', 'query', 'fragment').url
        'http://username:password@host.com:80/path?query#fragment'
    """
    scheme, auth, host, port, path, query, fragment = self
    url = u""

    # We use "is not None" because we want things to happen with empty strings (or 0 port)
    if scheme is not None:
        url += scheme + u"://"
    if auth is not None:
        url += auth + u"@"
    if host is not None:
        url += host
    if port is not None:
        url += u":" + str(port)
    if path is not None:
        url += path
    if query is not None:
        url += u"?" + query
    if fragment is not None:
        url += u"#" + fragment

    return url
[ "def", "url", "(", "self", ")", ":", "scheme", ",", "auth", ",", "host", ",", "port", ",", "path", ",", "query", ",", "fragment", "=", "self", "url", "=", "u\"\"", "# We use \"is not None\" we want things to happen with empty strings (or 0 port)", "if", "scheme", "is", "not", "None", ":", "url", "+=", "scheme", "+", "u\"://\"", "if", "auth", "is", "not", "None", ":", "url", "+=", "auth", "+", "u\"@\"", "if", "host", "is", "not", "None", ":", "url", "+=", "host", "if", "port", "is", "not", "None", ":", "url", "+=", "u\":\"", "+", "str", "(", "port", ")", "if", "path", "is", "not", "None", ":", "url", "+=", "path", "if", "query", "is", "not", "None", ":", "url", "+=", "u\"?\"", "+", "query", "if", "fragment", "is", "not", "None", ":", "url", "+=", "u\"#\"", "+", "fragment", "return", "url" ]
[ 131, 4 ]
[ 168, 18 ]
python
en
['en', 'error', 'th']
False
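Because Url is a namedtuple, individual components can be swapped with _replace() and the url property re-serializes the result. A small sketch under the same urllib3 assumption as above; the query value is made up:

from urllib3.util.url import parse_url

u = parse_url("http://google.com/mail/")
print(u.url)  # 'http://google.com/mail/' (round-trips)

# Rebuild with a different scheme and an added query string.
print(u._replace(scheme="https", query="shva=1").url)
# 'https://google.com/mail/?shva=1'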
BaseDatabaseCreation.create_test_db
(self, verbosity=1, autoclobber=False, serialize=True, keepdb=False)
Create a test database, prompting the user for confirmation if the database already exists. Return the name of the test database created.
Create a test database, prompting the user for confirmation if the database already exists. Return the name of the test database created.
def create_test_db(self, verbosity=1, autoclobber=False, serialize=True, keepdb=False):
    """
    Create a test database, prompting the user for confirmation if the
    database already exists. Return the name of the test database created.
    """
    # Don't import django.core.management if it isn't needed.
    from django.core.management import call_command

    test_database_name = self._get_test_db_name()

    if verbosity >= 1:
        action = 'Creating'
        if keepdb:
            action = "Using existing"

        self.log('%s test database for alias %s...' % (
            action,
            self._get_database_display_str(verbosity, test_database_name),
        ))

    # We could skip this call if keepdb is True, but we instead
    # give it the keepdb param. This is to handle the case
    # where the test DB doesn't exist, in which case we need to
    # create it, then just not destroy it. If we instead skip
    # this, we will get an exception.
    self._create_test_db(verbosity, autoclobber, keepdb)

    self.connection.close()
    settings.DATABASES[self.connection.alias]["NAME"] = test_database_name
    self.connection.settings_dict["NAME"] = test_database_name

    try:
        if self.connection.settings_dict['TEST']['MIGRATE'] is False:
            # Disable migrations for all apps.
            old_migration_modules = settings.MIGRATION_MODULES
            settings.MIGRATION_MODULES = {
                app.label: None
                for app in apps.get_app_configs()
            }
        # We report migrate messages at one level lower than that
        # requested. This ensures we don't get flooded with messages during
        # testing (unless you really ask to be flooded).
        call_command(
            'migrate',
            verbosity=max(verbosity - 1, 0),
            interactive=False,
            database=self.connection.alias,
            run_syncdb=True,
        )
    finally:
        if self.connection.settings_dict['TEST']['MIGRATE'] is False:
            settings.MIGRATION_MODULES = old_migration_modules

    # We then serialize the current state of the database into a string
    # and store it on the connection. This slightly horrific process is so people
    # who are testing on databases without transactions or who are using
    # a TransactionTestCase still get a clean database on every test run.
    if serialize:
        self.connection._test_serialized_contents = self.serialize_db_to_string()

    call_command('createcachetable', database=self.connection.alias)

    # Ensure a connection for the side effect of initializing the test database.
    self.connection.ensure_connection()

    if os.environ.get('RUNNING_DJANGOS_TEST_SUITE') == 'true':
        self.mark_expected_failures_and_skips()

    return test_database_name
[ "def", "create_test_db", "(", "self", ",", "verbosity", "=", "1", ",", "autoclobber", "=", "False", ",", "serialize", "=", "True", ",", "keepdb", "=", "False", ")", ":", "# Don't import django.core.management if it isn't needed.", "from", "django", ".", "core", ".", "management", "import", "call_command", "test_database_name", "=", "self", ".", "_get_test_db_name", "(", ")", "if", "verbosity", ">=", "1", ":", "action", "=", "'Creating'", "if", "keepdb", ":", "action", "=", "\"Using existing\"", "self", ".", "log", "(", "'%s test database for alias %s...'", "%", "(", "action", ",", "self", ".", "_get_database_display_str", "(", "verbosity", ",", "test_database_name", ")", ",", ")", ")", "# We could skip this call if keepdb is True, but we instead", "# give it the keepdb param. This is to handle the case", "# where the test DB doesn't exist, in which case we need to", "# create it, then just not destroy it. If we instead skip", "# this, we will get an exception.", "self", ".", "_create_test_db", "(", "verbosity", ",", "autoclobber", ",", "keepdb", ")", "self", ".", "connection", ".", "close", "(", ")", "settings", ".", "DATABASES", "[", "self", ".", "connection", ".", "alias", "]", "[", "\"NAME\"", "]", "=", "test_database_name", "self", ".", "connection", ".", "settings_dict", "[", "\"NAME\"", "]", "=", "test_database_name", "try", ":", "if", "self", ".", "connection", ".", "settings_dict", "[", "'TEST'", "]", "[", "'MIGRATE'", "]", "is", "False", ":", "# Disable migrations for all apps.", "old_migration_modules", "=", "settings", ".", "MIGRATION_MODULES", "settings", ".", "MIGRATION_MODULES", "=", "{", "app", ".", "label", ":", "None", "for", "app", "in", "apps", ".", "get_app_configs", "(", ")", "}", "# We report migrate messages at one level lower than that", "# requested. This ensures we don't get flooded with messages during", "# testing (unless you really ask to be flooded).", "call_command", "(", "'migrate'", ",", "verbosity", "=", "max", "(", "verbosity", "-", "1", ",", "0", ")", ",", "interactive", "=", "False", ",", "database", "=", "self", ".", "connection", ".", "alias", ",", "run_syncdb", "=", "True", ",", ")", "finally", ":", "if", "self", ".", "connection", ".", "settings_dict", "[", "'TEST'", "]", "[", "'MIGRATE'", "]", "is", "False", ":", "settings", ".", "MIGRATION_MODULES", "=", "old_migration_modules", "# We then serialize the current state of the database into a string", "# and store it on the connection. This slightly horrific process is so people", "# who are testing on databases without transactions or who are using", "# a TransactionTestCase still get a clean database on every test run.", "if", "serialize", ":", "self", ".", "connection", ".", "_test_serialized_contents", "=", "self", ".", "serialize_db_to_string", "(", ")", "call_command", "(", "'createcachetable'", ",", "database", "=", "self", ".", "connection", ".", "alias", ")", "# Ensure a connection for the side effect of initializing the test database.", "self", ".", "connection", ".", "ensure_connection", "(", ")", "if", "os", ".", "environ", ".", "get", "(", "'RUNNING_DJANGOS_TEST_SUITE'", ")", "==", "'true'", ":", "self", ".", "mark_expected_failures_and_skips", "(", ")", "return", "test_database_name" ]
[ 31, 4 ]
[ 99, 33 ]
python
en
['en', 'error', 'th']
False
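For callers outside the default test runner, Django exposes a matching create/destroy pair on connection.creation. A hedged sketch (it assumes a fully configured Django settings module and uses the 'default' alias):

from django.db import connection

old_name = connection.settings_dict["NAME"]
# Build the test database and point the connection at it.
connection.creation.create_test_db(verbosity=1, keepdb=False)
# ... run queries against the fresh test database ...
# Tear it down and restore the original NAME.
connection.creation.destroy_test_db(old_name, verbosity=1)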
BaseDatabaseCreation.set_as_test_mirror
(self, primary_settings_dict)
Set this database up to be used in testing as a mirror of a primary database whose settings are given.
Set this database up to be used in testing as a mirror of a primary database whose settings are given.
def set_as_test_mirror(self, primary_settings_dict):
    """
    Set this database up to be used in testing as a mirror of a primary
    database whose settings are given.
    """
    self.connection.settings_dict['NAME'] = primary_settings_dict['NAME']
[ "def", "set_as_test_mirror", "(", "self", ",", "primary_settings_dict", ")", ":", "self", ".", "connection", ".", "settings_dict", "[", "'NAME'", "]", "=", "primary_settings_dict", "[", "'NAME'", "]" ]
[ 101, 4 ]
[ 106, 77 ]
python
en
['en', 'error', 'th']
False
BaseDatabaseCreation.serialize_db_to_string
(self)
Serialize all data in the database into a JSON string. Designed only for test runner usage; will not handle large amounts of data.
Serialize all data in the database into a JSON string. Designed only for test runner usage; will not handle large amounts of data.
def serialize_db_to_string(self):
    """
    Serialize all data in the database into a JSON string.
    Designed only for test runner usage; will not handle large
    amounts of data.
    """
    # Iteratively return every object for all models to serialize.
    def get_objects():
        from django.db.migrations.loader import MigrationLoader
        loader = MigrationLoader(self.connection)
        for app_config in apps.get_app_configs():
            if (
                app_config.models_module is not None and
                app_config.label in loader.migrated_apps and
                app_config.name not in settings.TEST_NON_SERIALIZED_APPS
            ):
                for model in app_config.get_models():
                    if (
                        model._meta.can_migrate(self.connection) and
                        router.allow_migrate_model(self.connection.alias, model)
                    ):
                        queryset = model._base_manager.using(
                            self.connection.alias,
                        ).order_by(model._meta.pk.name)
                        yield from queryset.iterator()
    # Serialize to a string
    out = StringIO()
    serializers.serialize("json", get_objects(), indent=None, stream=out)
    return out.getvalue()
[ "def", "serialize_db_to_string", "(", "self", ")", ":", "# Iteratively return every object for all models to serialize.", "def", "get_objects", "(", ")", ":", "from", "django", ".", "db", ".", "migrations", ".", "loader", "import", "MigrationLoader", "loader", "=", "MigrationLoader", "(", "self", ".", "connection", ")", "for", "app_config", "in", "apps", ".", "get_app_configs", "(", ")", ":", "if", "(", "app_config", ".", "models_module", "is", "not", "None", "and", "app_config", ".", "label", "in", "loader", ".", "migrated_apps", "and", "app_config", ".", "name", "not", "in", "settings", ".", "TEST_NON_SERIALIZED_APPS", ")", ":", "for", "model", "in", "app_config", ".", "get_models", "(", ")", ":", "if", "(", "model", ".", "_meta", ".", "can_migrate", "(", "self", ".", "connection", ")", "and", "router", ".", "allow_migrate_model", "(", "self", ".", "connection", ".", "alias", ",", "model", ")", ")", ":", "queryset", "=", "model", ".", "_base_manager", ".", "using", "(", "self", ".", "connection", ".", "alias", ",", ")", ".", "order_by", "(", "model", ".", "_meta", ".", "pk", ".", "name", ")", "yield", "from", "queryset", ".", "iterator", "(", ")", "# Serialize to a string", "out", "=", "StringIO", "(", ")", "serializers", ".", "serialize", "(", "\"json\"", ",", "get_objects", "(", ")", ",", "indent", "=", "None", ",", "stream", "=", "out", ")", "return", "out", ".", "getvalue", "(", ")" ]
[ 108, 4 ]
[ 136, 29 ]
python
en
['en', 'error', 'th']
False
BaseDatabaseCreation.deserialize_db_from_string
(self, data)
Reload the database with data from a string generated by the serialize_db_to_string() method.
Reload the database with data from a string generated by the serialize_db_to_string() method.
def deserialize_db_from_string(self, data):
    """
    Reload the database with data from a string generated by
    the serialize_db_to_string() method.
    """
    data = StringIO(data)
    table_names = set()
    # Load data in a transaction to handle forward references and cycles.
    with atomic(using=self.connection.alias):
        # Disable constraint checks, because some databases (MySQL) don't
        # support deferred checks.
        with self.connection.constraint_checks_disabled():
            for obj in serializers.deserialize('json', data, using=self.connection.alias):
                obj.save()
                table_names.add(obj.object.__class__._meta.db_table)
    # Manually check for any invalid keys that might have been added,
    # because constraint checks were disabled.
    self.connection.check_constraints(table_names=table_names)
[ "def", "deserialize_db_from_string", "(", "self", ",", "data", ")", ":", "data", "=", "StringIO", "(", "data", ")", "table_names", "=", "set", "(", ")", "# Load data in a transaction to handle forward references and cycles.", "with", "atomic", "(", "using", "=", "self", ".", "connection", ".", "alias", ")", ":", "# Disable constraint checks, because some databases (MySQL) doesn't", "# support deferred checks.", "with", "self", ".", "connection", ".", "constraint_checks_disabled", "(", ")", ":", "for", "obj", "in", "serializers", ".", "deserialize", "(", "'json'", ",", "data", ",", "using", "=", "self", ".", "connection", ".", "alias", ")", ":", "obj", ".", "save", "(", ")", "table_names", ".", "add", "(", "obj", ".", "object", ".", "__class__", ".", "_meta", ".", "db_table", ")", "# Manually check for any invalid keys that might have been added,", "# because constraint checks were disabled.", "self", ".", "connection", ".", "check_constraints", "(", "table_names", "=", "table_names", ")" ]
[ 138, 4 ]
[ 155, 70 ]
python
en
['en', 'error', 'th']
False
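These two methods are designed as a round trip; roughly what the test framework does for databases that can't roll back between tests. A sketch, again assuming a configured Django project:

from django.db import connection

# Snapshot everything in the test database as a JSON string ...
state = connection.creation.serialize_db_to_string()
# ... let a test trample the data ...
# ... then restore the snapshot inside a single transaction.
connection.creation.deserialize_db_from_string(state)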
BaseDatabaseCreation._get_database_display_str
(self, verbosity, database_name)
Return display string for a database for use in various actions.
Return display string for a database for use in various actions.
def _get_database_display_str(self, verbosity, database_name):
    """
    Return display string for a database for use in various actions.
    """
    return "'%s'%s" % (
        self.connection.alias,
        (" ('%s')" % database_name) if verbosity >= 2 else '',
    )
[ "def", "_get_database_display_str", "(", "self", ",", "verbosity", ",", "database_name", ")", ":", "return", "\"'%s'%s\"", "%", "(", "self", ".", "connection", ".", "alias", ",", "(", "\" ('%s')\"", "%", "database_name", ")", "if", "verbosity", ">=", "2", "else", "''", ",", ")" ]
[ 157, 4 ]
[ 164, 9 ]
python
en
['en', 'error', 'th']
False
BaseDatabaseCreation._get_test_db_name
(self)
Internal implementation - return the name of the test DB that will be created. Only useful when called from create_test_db() and _create_test_db() and when no external munging is done with the 'NAME' settings.
Internal implementation - return the name of the test DB that will be created. Only useful when called from create_test_db() and _create_test_db() and when no external munging is done with the 'NAME' settings.
def _get_test_db_name(self):
    """
    Internal implementation - return the name of the test DB that will be
    created. Only useful when called from create_test_db() and
    _create_test_db() and when no external munging is done with the 'NAME'
    settings.
    """
    if self.connection.settings_dict['TEST']['NAME']:
        return self.connection.settings_dict['TEST']['NAME']
    return TEST_DATABASE_PREFIX + self.connection.settings_dict['NAME']
[ "def", "_get_test_db_name", "(", "self", ")", ":", "if", "self", ".", "connection", ".", "settings_dict", "[", "'TEST'", "]", "[", "'NAME'", "]", ":", "return", "self", ".", "connection", ".", "settings_dict", "[", "'TEST'", "]", "[", "'NAME'", "]", "return", "TEST_DATABASE_PREFIX", "+", "self", ".", "connection", ".", "settings_dict", "[", "'NAME'", "]" ]
[ 166, 4 ]
[ 175, 75 ]
python
en
['en', 'error', 'th']
False
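The lookup order means a TEST['NAME'] entry in settings wins over the test_ prefix. An illustrative settings fragment (the database names are hypothetical):

DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.postgresql",
        "NAME": "myapp",
        # Without this, the test database would be named 'test_myapp'.
        "TEST": {"NAME": "myapp_test"},
    },
}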
BaseDatabaseCreation._create_test_db
(self, verbosity, autoclobber, keepdb=False)
Internal implementation - create the test db tables.
Internal implementation - create the test db tables.
def _create_test_db(self, verbosity, autoclobber, keepdb=False):
    """
    Internal implementation - create the test db tables.
    """
    test_database_name = self._get_test_db_name()
    test_db_params = {
        'dbname': self.connection.ops.quote_name(test_database_name),
        'suffix': self.sql_table_creation_suffix(),
    }
    # Create the test database and connect to it.
    with self._nodb_cursor() as cursor:
        try:
            self._execute_create_test_db(cursor, test_db_params, keepdb)
        except Exception as e:
            # if we want to keep the db, then no need to do any of the below,
            # just return and skip it all.
            if keepdb:
                return test_database_name

            self.log('Got an error creating the test database: %s' % e)
            if not autoclobber:
                confirm = input(
                    "Type 'yes' if you would like to try deleting the test "
                    "database '%s', or 'no' to cancel: " % test_database_name)
            if autoclobber or confirm == 'yes':
                try:
                    if verbosity >= 1:
                        self.log('Destroying old test database for alias %s...' % (
                            self._get_database_display_str(verbosity, test_database_name),
                        ))
                    cursor.execute('DROP DATABASE %(dbname)s' % test_db_params)
                    self._execute_create_test_db(cursor, test_db_params, keepdb)
                except Exception as e:
                    self.log('Got an error recreating the test database: %s' % e)
                    sys.exit(2)
            else:
                self.log('Tests cancelled.')
                sys.exit(1)

    return test_database_name
[ "def", "_create_test_db", "(", "self", ",", "verbosity", ",", "autoclobber", ",", "keepdb", "=", "False", ")", ":", "test_database_name", "=", "self", ".", "_get_test_db_name", "(", ")", "test_db_params", "=", "{", "'dbname'", ":", "self", ".", "connection", ".", "ops", ".", "quote_name", "(", "test_database_name", ")", ",", "'suffix'", ":", "self", ".", "sql_table_creation_suffix", "(", ")", ",", "}", "# Create the test database and connect to it.", "with", "self", ".", "_nodb_cursor", "(", ")", "as", "cursor", ":", "try", ":", "self", ".", "_execute_create_test_db", "(", "cursor", ",", "test_db_params", ",", "keepdb", ")", "except", "Exception", "as", "e", ":", "# if we want to keep the db, then no need to do any of the below,", "# just return and skip it all.", "if", "keepdb", ":", "return", "test_database_name", "self", ".", "log", "(", "'Got an error creating the test database: %s'", "%", "e", ")", "if", "not", "autoclobber", ":", "confirm", "=", "input", "(", "\"Type 'yes' if you would like to try deleting the test \"", "\"database '%s', or 'no' to cancel: \"", "%", "test_database_name", ")", "if", "autoclobber", "or", "confirm", "==", "'yes'", ":", "try", ":", "if", "verbosity", ">=", "1", ":", "self", ".", "log", "(", "'Destroying old test database for alias %s...'", "%", "(", "self", ".", "_get_database_display_str", "(", "verbosity", ",", "test_database_name", ")", ",", ")", ")", "cursor", ".", "execute", "(", "'DROP DATABASE %(dbname)s'", "%", "test_db_params", ")", "self", ".", "_execute_create_test_db", "(", "cursor", ",", "test_db_params", ",", "keepdb", ")", "except", "Exception", "as", "e", ":", "self", ".", "log", "(", "'Got an error recreating the test database: %s'", "%", "e", ")", "sys", ".", "exit", "(", "2", ")", "else", ":", "self", ".", "log", "(", "'Tests cancelled.'", ")", "sys", ".", "exit", "(", "1", ")", "return", "test_database_name" ]
[ 180, 4 ]
[ 219, 33 ]
python
en
['en', 'error', 'th']
False
BaseDatabaseCreation.clone_test_db
(self, suffix, verbosity=1, autoclobber=False, keepdb=False)
Clone a test database.
Clone a test database.
def clone_test_db(self, suffix, verbosity=1, autoclobber=False, keepdb=False):
    """
    Clone a test database.
    """
    source_database_name = self.connection.settings_dict['NAME']

    if verbosity >= 1:
        action = 'Cloning test database'
        if keepdb:
            action = 'Using existing clone'
        self.log('%s for alias %s...' % (
            action,
            self._get_database_display_str(verbosity, source_database_name),
        ))

    # We could skip this call if keepdb is True, but we instead
    # give it the keepdb param. See create_test_db for details.
    self._clone_test_db(suffix, verbosity, keepdb)
[ "def", "clone_test_db", "(", "self", ",", "suffix", ",", "verbosity", "=", "1", ",", "autoclobber", "=", "False", ",", "keepdb", "=", "False", ")", ":", "source_database_name", "=", "self", ".", "connection", ".", "settings_dict", "[", "'NAME'", "]", "if", "verbosity", ">=", "1", ":", "action", "=", "'Cloning test database'", "if", "keepdb", ":", "action", "=", "'Using existing clone'", "self", ".", "log", "(", "'%s for alias %s...'", "%", "(", "action", ",", "self", ".", "_get_database_display_str", "(", "verbosity", ",", "source_database_name", ")", ",", ")", ")", "# We could skip this call if keepdb is True, but we instead", "# give it the keepdb param. See create_test_db for details.", "self", ".", "_clone_test_db", "(", "suffix", ",", "verbosity", ",", "keepdb", ")" ]
[ 221, 4 ]
[ 238, 54 ]
python
en
['en', 'error', 'th']
False
BaseDatabaseCreation.get_test_db_clone_settings
(self, suffix)
Return a modified connection settings dict for the n-th clone of a DB.
Return a modified connection settings dict for the n-th clone of a DB.
def get_test_db_clone_settings(self, suffix):
    """
    Return a modified connection settings dict for the n-th clone of a DB.
    """
    # When this function is called, the test database has been created
    # already and its name has been copied to settings_dict['NAME'] so
    # we don't need to call _get_test_db_name.
    orig_settings_dict = self.connection.settings_dict
    return {**orig_settings_dict, 'NAME': '{}_{}'.format(orig_settings_dict['NAME'], suffix)}
[ "def", "get_test_db_clone_settings", "(", "self", ",", "suffix", ")", ":", "# When this function is called, the test database has been created", "# already and its name has been copied to settings_dict['NAME'] so", "# we don't need to call _get_test_db_name.", "orig_settings_dict", "=", "self", ".", "connection", ".", "settings_dict", "return", "{", "*", "*", "orig_settings_dict", ",", "'NAME'", ":", "'{}_{}'", ".", "format", "(", "orig_settings_dict", "[", "'NAME'", "]", ",", "suffix", ")", "}" ]
[ 240, 4 ]
[ 248, 97 ]
python
en
['en', 'error', 'th']
False
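The {**d, key: value} merge leaves the original settings dict untouched while renaming the clone. The naming scheme in isolation (plain Python, no Django required; values are illustrative):

orig_settings_dict = {"ENGINE": "django.db.backends.sqlite3", "NAME": "test_myapp"}
clone = {**orig_settings_dict, "NAME": "{}_{}".format(orig_settings_dict["NAME"], 1)}
print(clone["NAME"])               # 'test_myapp_1' (the first parallel clone)
print(orig_settings_dict["NAME"])  # 'test_myapp' (source dict is unchanged)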
BaseDatabaseCreation._clone_test_db
(self, suffix, verbosity, keepdb=False)
Internal implementation - duplicate the test db tables.
Internal implementation - duplicate the test db tables.
def _clone_test_db(self, suffix, verbosity, keepdb=False):
    """
    Internal implementation - duplicate the test db tables.
    """
    raise NotImplementedError(
        "The database backend doesn't support cloning databases. "
        "Disable the option to run tests in parallel processes.")
[ "def", "_clone_test_db", "(", "self", ",", "suffix", ",", "verbosity", ",", "keepdb", "=", "False", ")", ":", "raise", "NotImplementedError", "(", "\"The database backend doesn't support cloning databases. \"", "\"Disable the option to run tests in parallel processes.\"", ")" ]
[ 250, 4 ]
[ 256, 69 ]
python
en
['en', 'error', 'th']
False
BaseDatabaseCreation.destroy_test_db
(self, old_database_name=None, verbosity=1, keepdb=False, suffix=None)
Destroy a test database, prompting the user for confirmation if the database already exists.
Destroy a test database, prompting the user for confirmation if the database already exists.
def destroy_test_db(self, old_database_name=None, verbosity=1, keepdb=False, suffix=None):
    """
    Destroy a test database, prompting the user for confirmation if the
    database already exists.
    """
    self.connection.close()
    if suffix is None:
        test_database_name = self.connection.settings_dict['NAME']
    else:
        test_database_name = self.get_test_db_clone_settings(suffix)['NAME']

    if verbosity >= 1:
        action = 'Destroying'
        if keepdb:
            action = 'Preserving'
        self.log('%s test database for alias %s...' % (
            action,
            self._get_database_display_str(verbosity, test_database_name),
        ))

    # if we want to preserve the database
    # skip the actual destroying piece.
    if not keepdb:
        self._destroy_test_db(test_database_name, verbosity)

    # Restore the original database name
    if old_database_name is not None:
        settings.DATABASES[self.connection.alias]["NAME"] = old_database_name
        self.connection.settings_dict["NAME"] = old_database_name
[ "def", "destroy_test_db", "(", "self", ",", "old_database_name", "=", "None", ",", "verbosity", "=", "1", ",", "keepdb", "=", "False", ",", "suffix", "=", "None", ")", ":", "self", ".", "connection", ".", "close", "(", ")", "if", "suffix", "is", "None", ":", "test_database_name", "=", "self", ".", "connection", ".", "settings_dict", "[", "'NAME'", "]", "else", ":", "test_database_name", "=", "self", ".", "get_test_db_clone_settings", "(", "suffix", ")", "[", "'NAME'", "]", "if", "verbosity", ">=", "1", ":", "action", "=", "'Destroying'", "if", "keepdb", ":", "action", "=", "'Preserving'", "self", ".", "log", "(", "'%s test database for alias %s...'", "%", "(", "action", ",", "self", ".", "_get_database_display_str", "(", "verbosity", ",", "test_database_name", ")", ",", ")", ")", "# if we want to preserve the database", "# skip the actual destroying piece.", "if", "not", "keepdb", ":", "self", ".", "_destroy_test_db", "(", "test_database_name", ",", "verbosity", ")", "# Restore the original database name", "if", "old_database_name", "is", "not", "None", ":", "settings", ".", "DATABASES", "[", "self", ".", "connection", ".", "alias", "]", "[", "\"NAME\"", "]", "=", "old_database_name", "self", ".", "connection", ".", "settings_dict", "[", "\"NAME\"", "]", "=", "old_database_name" ]
[ 258, 4 ]
[ 286, 69 ]
python
en
['en', 'error', 'th']
False
BaseDatabaseCreation._destroy_test_db
(self, test_database_name, verbosity)
Internal implementation - remove the test db tables.
Internal implementation - remove the test db tables.
def _destroy_test_db(self, test_database_name, verbosity):
    """
    Internal implementation - remove the test db tables.
    """
    # Remove the test database to clean up after
    # ourselves. Connect to the previous database (not the test database)
    # to do so, because it's not allowed to delete a database while being
    # connected to it.
    with self._nodb_cursor() as cursor:
        cursor.execute("DROP DATABASE %s"
                       % self.connection.ops.quote_name(test_database_name))
[ "def", "_destroy_test_db", "(", "self", ",", "test_database_name", ",", "verbosity", ")", ":", "# Remove the test database to clean up after", "# ourselves. Connect to the previous database (not the test database)", "# to do so, because it's not allowed to delete a database while being", "# connected to it.", "with", "self", ".", "_nodb_cursor", "(", ")", "as", "cursor", ":", "cursor", ".", "execute", "(", "\"DROP DATABASE %s\"", "%", "self", ".", "connection", ".", "ops", ".", "quote_name", "(", "test_database_name", ")", ")" ]
[ 288, 4 ]
[ 298, 80 ]
python
en
['en', 'error', 'th']
False
BaseDatabaseCreation.mark_expected_failures_and_skips
(self)
Mark tests in Django's test suite which are expected failures on this database and tests which should be skipped on this database.
Mark tests in Django's test suite which are expected failures on this database and tests which should be skipped on this database.
def mark_expected_failures_and_skips(self):
    """
    Mark tests in Django's test suite which are expected failures on this
    database and tests which should be skipped on this database.
    """
    for test_name in self.connection.features.django_test_expected_failures:
        test_case_name, _, test_method_name = test_name.rpartition('.')
        test_app = test_name.split('.')[0]
        # Importing a test app that isn't installed raises RuntimeError.
        if test_app in settings.INSTALLED_APPS:
            test_case = import_string(test_case_name)
            test_method = getattr(test_case, test_method_name)
            setattr(test_case, test_method_name, expectedFailure(test_method))
    for reason, tests in self.connection.features.django_test_skips.items():
        for test_name in tests:
            test_case_name, _, test_method_name = test_name.rpartition('.')
            test_app = test_name.split('.')[0]
            # Importing a test app that isn't installed raises RuntimeError.
            if test_app in settings.INSTALLED_APPS:
                test_case = import_string(test_case_name)
                test_method = getattr(test_case, test_method_name)
                setattr(test_case, test_method_name, skip(reason)(test_method))
[ "def", "mark_expected_failures_and_skips", "(", "self", ")", ":", "for", "test_name", "in", "self", ".", "connection", ".", "features", ".", "django_test_expected_failures", ":", "test_case_name", ",", "_", ",", "test_method_name", "=", "test_name", ".", "rpartition", "(", "'.'", ")", "test_app", "=", "test_name", ".", "split", "(", "'.'", ")", "[", "0", "]", "# Importing a test app that isn't installed raises RuntimeError.", "if", "test_app", "in", "settings", ".", "INSTALLED_APPS", ":", "test_case", "=", "import_string", "(", "test_case_name", ")", "test_method", "=", "getattr", "(", "test_case", ",", "test_method_name", ")", "setattr", "(", "test_case", ",", "test_method_name", ",", "expectedFailure", "(", "test_method", ")", ")", "for", "reason", ",", "tests", "in", "self", ".", "connection", ".", "features", ".", "django_test_skips", ".", "items", "(", ")", ":", "for", "test_name", "in", "tests", ":", "test_case_name", ",", "_", ",", "test_method_name", "=", "test_name", ".", "rpartition", "(", "'.'", ")", "test_app", "=", "test_name", ".", "split", "(", "'.'", ")", "[", "0", "]", "# Importing a test app that isn't installed raises RuntimeError.", "if", "test_app", "in", "settings", ".", "INSTALLED_APPS", ":", "test_case", "=", "import_string", "(", "test_case_name", ")", "test_method", "=", "getattr", "(", "test_case", ",", "test_method_name", ")", "setattr", "(", "test_case", ",", "test_method_name", ",", "skip", "(", "reason", ")", "(", "test_method", ")", ")" ]
[ 300, 4 ]
[ 321, 83 ]
python
en
['en', 'error', 'th']
False
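The rebinding trick here (fetch a test method off the class, wrap it, setattr it back) works with plain unittest too. A self-contained sketch of the same technique; the test names and skip reason are made up:

import unittest
from unittest import expectedFailure, skip

class DemoTests(unittest.TestCase):
    def test_known_bug(self):
        self.assertEqual(1, 2)  # fails, but is tolerated below

    def test_needs_feature(self):
        pass

# Wrap the methods after the class is defined, the way
# mark_expected_failures_and_skips does for Django's own suite.
setattr(DemoTests, "test_known_bug", expectedFailure(DemoTests.test_known_bug))
setattr(DemoTests, "test_needs_feature",
        skip("backend lacks feature X")(DemoTests.test_needs_feature))

if __name__ == "__main__":
    unittest.main()  # reports 1 expected failure, 1 skip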
BaseDatabaseCreation.sql_table_creation_suffix
(self)
SQL to append to the end of the test table creation statements.
SQL to append to the end of the test table creation statements.
def sql_table_creation_suffix(self):
    """
    SQL to append to the end of the test table creation statements.
    """
    return ''
[ "def", "sql_table_creation_suffix", "(", "self", ")", ":", "return", "''" ]
[ 323, 4 ]
[ 327, 17 ]
python
en
['en', 'error', 'th']
False
BaseDatabaseCreation.test_db_signature
(self)
Return a tuple with elements of self.connection.settings_dict (a DATABASES setting value) that uniquely identify a database according to the particularities of the RDBMS.
Return a tuple with elements of self.connection.settings_dict (a DATABASES setting value) that uniquely identify a database according to the particularities of the RDBMS.
def test_db_signature(self):
    """
    Return a tuple with elements of self.connection.settings_dict (a
    DATABASES setting value) that uniquely identify a database
    according to the particularities of the RDBMS.
    """
    settings_dict = self.connection.settings_dict
    return (
        settings_dict['HOST'],
        settings_dict['PORT'],
        settings_dict['ENGINE'],
        self._get_test_db_name(),
    )
[ "def", "test_db_signature", "(", "self", ")", ":", "settings_dict", "=", "self", ".", "connection", ".", "settings_dict", "return", "(", "settings_dict", "[", "'HOST'", "]", ",", "settings_dict", "[", "'PORT'", "]", ",", "settings_dict", "[", "'ENGINE'", "]", ",", "self", ".", "_get_test_db_name", "(", ")", ",", ")" ]
[ 329, 4 ]
[ 341, 9 ]
python
en
['en', 'error', 'th']
False
run
(argv=None)
The main function which creates the pipeline and runs it.
The main function which creates the pipeline and runs it.
def run(argv=None):
    """The main function which creates the pipeline and runs it."""
    parser = argparse.ArgumentParser()
    # Add the arguments needed for this specific Dataflow job.
    parser.add_argument(
        '--input', dest='input', required=True,
        help='Input file to read. This can be a local file or '
             'a file in a Google Storage Bucket.')
    parser.add_argument('--output', dest='output', required=True,
                        help='Output BQ table to write results to.')
    parser.add_argument('--delimiter', dest='delimiter', required=False,
                        help='Delimiter to split input records.',
                        default=',')
    parser.add_argument('--fields', dest='fields', required=True,
                        help='Comma separated list of field names.')
    parser.add_argument('--load_dt', dest='load_dt', required=True,
                        help='Load date in YYYY-MM-DD format.')
    known_args, pipeline_args = parser.parse_known_args(argv)
    row_transformer = RowTransformer(delimiter=known_args.delimiter,
                                     header=known_args.fields,
                                     filename=ntpath.basename(known_args.input),
                                     load_dt=known_args.load_dt)
    p_opts = pipeline_options.PipelineOptions(pipeline_args)

    # Initiate the pipeline using the pipeline arguments passed in from the
    # command line. This includes information including where Dataflow should
    # store temp files, and what the project id is.
    with beam.Pipeline(options=p_opts) as pipeline:
        # Read the file. This is the source of the pipeline. All further
        # processing starts with lines read from the file. We use the input
        # argument from the command line.
        rows = pipeline | "Read from text file" >> beam.io.ReadFromText(known_args.input)

        # This stage of the pipeline translates from a delimited single row
        # input to a dictionary object consumable by BigQuery.
        # It refers to a function we have written. This function will
        # be run in parallel on different workers using input from the
        # previous stage of the pipeline.
        dict_records = rows | "Convert to BigQuery row" >> beam.Map(
            lambda r: row_transformer.parse(r))

        # This stage of the pipeline writes the dictionary records into
        # an existing BigQuery table.
        dict_records | "Write to BigQuery" >> beam.io.Write(
            beam.io.BigQuerySink(known_args.output,
                                 create_disposition=beam.io.BigQueryDisposition.CREATE_NEVER,
                                 write_disposition=beam.io.BigQueryDisposition.WRITE_APPEND))
[ "def", "run", "(", "argv", "=", "None", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", ")", "# Add the arguments needed for this specific Dataflow job.", "parser", ".", "add_argument", "(", "'--input'", ",", "dest", "=", "'input'", ",", "required", "=", "True", ",", "help", "=", "'Input file to read. This can be a local file or '", "'a file in a Google Storage Bucket.'", ")", "parser", ".", "add_argument", "(", "'--output'", ",", "dest", "=", "'output'", ",", "required", "=", "True", ",", "help", "=", "'Output BQ table to write results to.'", ")", "parser", ".", "add_argument", "(", "'--delimiter'", ",", "dest", "=", "'delimiter'", ",", "required", "=", "False", ",", "help", "=", "'Delimiter to split input records.'", ",", "default", "=", "','", ")", "parser", ".", "add_argument", "(", "'--fields'", ",", "dest", "=", "'fields'", ",", "required", "=", "True", ",", "help", "=", "'Comma separated list of field names.'", ")", "parser", ".", "add_argument", "(", "'--load_dt'", ",", "dest", "=", "'load_dt'", ",", "required", "=", "True", ",", "help", "=", "'Load date in YYYY-MM-DD format.'", ")", "known_args", ",", "pipeline_args", "=", "parser", ".", "parse_known_args", "(", "argv", ")", "row_transformer", "=", "RowTransformer", "(", "delimiter", "=", "known_args", ".", "delimiter", ",", "header", "=", "known_args", ".", "fields", ",", "filename", "=", "ntpath", ".", "basename", "(", "known_args", ".", "input", ")", ",", "load_dt", "=", "known_args", ".", "load_dt", ")", "p_opts", "=", "pipeline_options", ".", "PipelineOptions", "(", "pipeline_args", ")", "# Initiate the pipeline using the pipeline arguments passed in from the", "# command line. This includes information including where Dataflow should", "# store temp files, and what the project id is.", "with", "beam", ".", "Pipeline", "(", "options", "=", "p_opts", ")", "as", "pipeline", ":", "# Read the file. This is the source of the pipeline. All further", "# processing starts with lines read from the file. We use the input", "# argument from the command line.", "rows", "=", "pipeline", "|", "\"Read from text file\"", ">>", "beam", ".", "io", ".", "ReadFromText", "(", "known_args", ".", "input", ")", "# This stage of the pipeline translates from a delimited single row", "# input to a dictionary object consumable by BigQuery.", "# It refers to a function we have written. This function will", "# be run in parallel on different workers using input from the", "# previous stage of the pipeline.", "dict_records", "=", "rows", "|", "\"Convert to BigQuery row\"", ">>", "beam", ".", "Map", "(", "lambda", "r", ":", "row_transformer", ".", "parse", "(", "r", ")", ")", "# This stage of the pipeline writes the dictionary records into", "# an existing BigQuery table.", "dict_records", "|", "\"Write to BigQuery\"", ">>", "beam", ".", "io", ".", "Write", "(", "beam", ".", "io", ".", "BigQuerySink", "(", "known_args", ".", "output", ",", "create_disposition", "=", "beam", ".", "io", ".", "BigQueryDisposition", ".", "CREATE_NEVER", ",", "write_disposition", "=", "beam", ".", "io", ".", "BigQueryDisposition", ".", "WRITE_APPEND", ")", ")" ]
[ 61, 0 ]
[ 114, 93 ]
python
en
['en', 'en', 'en']
True
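The parse_known_args() call is what lets one command line feed both this job and Beam: flags the parser knows about land in known_args, and everything else passes through as pipeline_args for PipelineOptions. A standalone illustration (the bucket and project names are made up):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--input", required=True)
known_args, pipeline_args = parser.parse_known_args([
    "--input", "gs://my-bucket/data.csv",   # consumed by this parser
    "--runner", "DataflowRunner",           # left over for PipelineOptions
    "--project", "my-gcp-project",
])
print(known_args.input)  # gs://my-bucket/data.csv
print(pipeline_args)     # ['--runner', 'DataflowRunner', '--project', 'my-gcp-project']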
RowTransformer.parse
(self, row)
This method translates a single delimited record into a dictionary which can be loaded into BigQuery. It also adds filename and load_dt fields to the dictionary.
This method translates a single delimited record into a dictionary which can be loaded into BigQuery. It also adds filename and load_dt fields to the dictionary.
def parse(self, row):
    """This method translates a single delimited record into a dictionary
    which can be loaded into BigQuery. It also adds filename and load_dt
    fields to the dictionary."""
    # Strip out the return characters and quote characters.
    values = re.split(self.delimiter, re.sub(r'[\r\n"]', '', row))

    row = dict(zip(self.keys, values))

    # Add an additional filename field.
    row['filename'] = self.filename

    # Add an additional load_dt field.
    row['load_dt'] = self.load_dt

    return row
[ "def", "parse", "(", "self", ",", "row", ")", ":", "# Strip out the return characters and quote characters.", "values", "=", "re", ".", "split", "(", "self", ".", "delimiter", ",", "re", ".", "sub", "(", "r'[\\r\\n\"]'", ",", "''", ",", "row", ")", ")", "row", "=", "dict", "(", "zip", "(", "self", ".", "keys", ",", "values", ")", ")", "# Add an additional filename field.", "row", "[", "'filename'", "]", "=", "self", ".", "filename", "# Add an additional load_dt field.", "row", "[", "'load_dt'", "]", "=", "self", ".", "load_dt", "return", "row" ]
[ 42, 4 ]
[ 58, 18 ]
python
en
['en', 'en', 'en']
True
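The body is just re.sub to strip quotes and newlines, re.split on the delimiter, and dict(zip(...)) against the field names. The same steps outside the class (the sample record, filename, and date are invented):

import re

keys = ["id", "name", "amount"]      # would come from the --fields argument
line = '42,"Widget",9.99\r\n'        # one raw delimited record
values = re.split(",", re.sub(r'[\r\n"]', "", line))
record = dict(zip(keys, values))
record["filename"] = "data.csv"      # added exactly like self.filename
record["load_dt"] = "2021-01-01"     # added exactly like self.load_dt
print(record)
# {'id': '42', 'name': 'Widget', 'amount': '9.99',
#  'filename': 'data.csv', 'load_dt': '2021-01-01'}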
BmpImageFile._bitmap
(self, header=0, offset=0)
Read relevant info about the BMP
Read relevant info about the BMP
def _bitmap(self, header=0, offset=0):
    """ Read relevant info about the BMP """
    read, seek = self.fp.read, self.fp.seek
    if header:
        seek(header)
    file_info = {}
    # read bmp header size @offset 14 (this is part of the header size)
    file_info["header_size"] = i32(read(4))
    file_info["direction"] = -1

    # -------------------- If requested, read header at a specific position
    # read the rest of the bmp header, without its size
    header_data = ImageFile._safe_read(self.fp, file_info["header_size"] - 4)

    # -------------------------------------------------- IBM OS/2 Bitmap v1
    # ----- This format has different offsets because of width/height types
    if file_info["header_size"] == 12:
        file_info["width"] = i16(header_data, 0)
        file_info["height"] = i16(header_data, 2)
        file_info["planes"] = i16(header_data, 4)
        file_info["bits"] = i16(header_data, 6)
        file_info["compression"] = self.RAW
        file_info["palette_padding"] = 3

    # --------------------------------------------- Windows Bitmap v2 to v5
    # v3, OS/2 v2, v4, v5
    elif file_info["header_size"] in (40, 64, 108, 124):
        file_info["y_flip"] = header_data[7] == 0xFF
        file_info["direction"] = 1 if file_info["y_flip"] else -1
        file_info["width"] = i32(header_data, 0)
        file_info["height"] = (
            i32(header_data, 4)
            if not file_info["y_flip"]
            else 2 ** 32 - i32(header_data, 4)
        )
        file_info["planes"] = i16(header_data, 8)
        file_info["bits"] = i16(header_data, 10)
        file_info["compression"] = i32(header_data, 12)
        # byte size of pixel data
        file_info["data_size"] = i32(header_data, 16)
        file_info["pixels_per_meter"] = (
            i32(header_data, 20),
            i32(header_data, 24),
        )
        file_info["colors"] = i32(header_data, 28)
        file_info["palette_padding"] = 4
        self.info["dpi"] = tuple(x / 39.3701 for x in file_info["pixels_per_meter"])
        if file_info["compression"] == self.BITFIELDS:
            if len(header_data) >= 52:
                for idx, mask in enumerate(
                    ["r_mask", "g_mask", "b_mask", "a_mask"]
                ):
                    file_info[mask] = i32(header_data, 36 + idx * 4)
            else:
                # 40 byte headers only have the three components in the
                # bitfields masks, ref:
                # https://msdn.microsoft.com/en-us/library/windows/desktop/dd183376(v=vs.85).aspx
                # See also
                # https://github.com/python-pillow/Pillow/issues/1293
                # There is a 4th component in the RGBQuad, in the alpha
                # location, but it is listed as a reserved component,
                # and it is not generally an alpha channel
                file_info["a_mask"] = 0x0
                for mask in ["r_mask", "g_mask", "b_mask"]:
                    file_info[mask] = i32(read(4))
            file_info["rgb_mask"] = (
                file_info["r_mask"],
                file_info["g_mask"],
                file_info["b_mask"],
            )
            file_info["rgba_mask"] = (
                file_info["r_mask"],
                file_info["g_mask"],
                file_info["b_mask"],
                file_info["a_mask"],
            )
    else:
        raise OSError(f"Unsupported BMP header type ({file_info['header_size']})")

    # ------------------ Special case : header is reported 40, which
    # ---------------------- is shorter than real size for bpp >= 16
    self._size = file_info["width"], file_info["height"]

    # ------- If color count was not found in the header, compute from bits
    file_info["colors"] = (
        file_info["colors"]
        if file_info.get("colors", 0)
        else (1 << file_info["bits"])
    )

    # ---------------------- Check bit depth for unusual unsupported values
    self.mode, raw_mode = BIT2MODE.get(file_info["bits"], (None, None))
    if self.mode is None:
        raise OSError(f"Unsupported BMP pixel depth ({file_info['bits']})")

    # ---------------- Process BMP with Bitfields compression (not palette)
    if file_info["compression"] == self.BITFIELDS:
        SUPPORTED = {
            32: [
                (0xFF0000, 0xFF00, 0xFF, 0x0),
                (0xFF0000, 0xFF00, 0xFF, 0xFF000000),
                (0xFF, 0xFF00, 0xFF0000, 0xFF000000),
                (0x0, 0x0, 0x0, 0x0),
                (0xFF000000, 0xFF0000, 0xFF00, 0x0),
            ],
            24: [(0xFF0000, 0xFF00, 0xFF)],
            16: [(0xF800, 0x7E0, 0x1F), (0x7C00, 0x3E0, 0x1F)],
        }
        MASK_MODES = {
            (32, (0xFF0000, 0xFF00, 0xFF, 0x0)): "BGRX",
            (32, (0xFF000000, 0xFF0000, 0xFF00, 0x0)): "XBGR",
            (32, (0xFF, 0xFF00, 0xFF0000, 0xFF000000)): "RGBA",
            (32, (0xFF0000, 0xFF00, 0xFF, 0xFF000000)): "BGRA",
            (32, (0x0, 0x0, 0x0, 0x0)): "BGRA",
            (24, (0xFF0000, 0xFF00, 0xFF)): "BGR",
            (16, (0xF800, 0x7E0, 0x1F)): "BGR;16",
            (16, (0x7C00, 0x3E0, 0x1F)): "BGR;15",
        }
        if file_info["bits"] in SUPPORTED:
            if (
                file_info["bits"] == 32
                and file_info["rgba_mask"] in SUPPORTED[file_info["bits"]]
            ):
                raw_mode = MASK_MODES[(file_info["bits"], file_info["rgba_mask"])]
                self.mode = "RGBA" if "A" in raw_mode else self.mode
            elif (
                file_info["bits"] in (24, 16)
                and file_info["rgb_mask"] in SUPPORTED[file_info["bits"]]
            ):
                raw_mode = MASK_MODES[(file_info["bits"], file_info["rgb_mask"])]
            else:
                raise OSError("Unsupported BMP bitfields layout")
        else:
            raise OSError("Unsupported BMP bitfields layout")
    elif file_info["compression"] == self.RAW:
        if file_info["bits"] == 32 and header == 22:  # 32-bit .cur offset
            raw_mode, self.mode = "BGRA", "RGBA"
    else:
        raise OSError(f"Unsupported BMP compression ({file_info['compression']})")

    # --------------- Once the header is processed, process the palette/LUT
    if self.mode == "P":  # Paletted for 1, 4 and 8 bit images
        # ---------------------------------------------------- 1-bit images
        if not (0 < file_info["colors"] <= 65536):
            raise OSError(f"Unsupported BMP Palette size ({file_info['colors']})")
        else:
            padding = file_info["palette_padding"]
            palette = read(padding * file_info["colors"])
            greyscale = True
            indices = (
                (0, 255)
                if file_info["colors"] == 2
                else list(range(file_info["colors"]))
            )

            # ----------------- Check if greyscale and ignore palette if so
            for ind, val in enumerate(indices):
                rgb = palette[ind * padding : ind * padding + 3]
                if rgb != o8(val) * 3:
                    greyscale = False

            # ------- If all colors are grey, white or black, ditch palette
            if greyscale:
                self.mode = "1" if file_info["colors"] == 2 else "L"
                raw_mode = self.mode
            else:
                self.mode = "P"
                self.palette = ImagePalette.raw(
                    "BGRX" if padding == 4 else "BGR", palette
                )

    # ---------------------------- Finally set the tile data for the plugin
    self.info["compression"] = file_info["compression"]
    self.tile = [
        (
            "raw",
            (0, 0, file_info["width"], file_info["height"]),
            offset or self.fp.tell(),
            (
                raw_mode,
                ((file_info["width"] * file_info["bits"] + 31) >> 3) & (~3),
                file_info["direction"],
            ),
        )
    ]
[ "def", "_bitmap", "(", "self", ",", "header", "=", "0", ",", "offset", "=", "0", ")", ":", "read", ",", "seek", "=", "self", ".", "fp", ".", "read", ",", "self", ".", "fp", ".", "seek", "if", "header", ":", "seek", "(", "header", ")", "file_info", "=", "{", "}", "# read bmp header size @offset 14 (this is part of the header size)", "file_info", "[", "\"header_size\"", "]", "=", "i32", "(", "read", "(", "4", ")", ")", "file_info", "[", "\"direction\"", "]", "=", "-", "1", "# -------------------- If requested, read header at a specific position", "# read the rest of the bmp header, without its size", "header_data", "=", "ImageFile", ".", "_safe_read", "(", "self", ".", "fp", ",", "file_info", "[", "\"header_size\"", "]", "-", "4", ")", "# -------------------------------------------------- IBM OS/2 Bitmap v1", "# ----- This format has different offsets because of width/height types", "if", "file_info", "[", "\"header_size\"", "]", "==", "12", ":", "file_info", "[", "\"width\"", "]", "=", "i16", "(", "header_data", ",", "0", ")", "file_info", "[", "\"height\"", "]", "=", "i16", "(", "header_data", ",", "2", ")", "file_info", "[", "\"planes\"", "]", "=", "i16", "(", "header_data", ",", "4", ")", "file_info", "[", "\"bits\"", "]", "=", "i16", "(", "header_data", ",", "6", ")", "file_info", "[", "\"compression\"", "]", "=", "self", ".", "RAW", "file_info", "[", "\"palette_padding\"", "]", "=", "3", "# --------------------------------------------- Windows Bitmap v2 to v5", "# v3, OS/2 v2, v4, v5", "elif", "file_info", "[", "\"header_size\"", "]", "in", "(", "40", ",", "64", ",", "108", ",", "124", ")", ":", "file_info", "[", "\"y_flip\"", "]", "=", "header_data", "[", "7", "]", "==", "0xFF", "file_info", "[", "\"direction\"", "]", "=", "1", "if", "file_info", "[", "\"y_flip\"", "]", "else", "-", "1", "file_info", "[", "\"width\"", "]", "=", "i32", "(", "header_data", ",", "0", ")", "file_info", "[", "\"height\"", "]", "=", "(", "i32", "(", "header_data", ",", "4", ")", "if", "not", "file_info", "[", "\"y_flip\"", "]", "else", "2", "**", "32", "-", "i32", "(", "header_data", ",", "4", ")", ")", "file_info", "[", "\"planes\"", "]", "=", "i16", "(", "header_data", ",", "8", ")", "file_info", "[", "\"bits\"", "]", "=", "i16", "(", "header_data", ",", "10", ")", "file_info", "[", "\"compression\"", "]", "=", "i32", "(", "header_data", ",", "12", ")", "# byte size of pixel data", "file_info", "[", "\"data_size\"", "]", "=", "i32", "(", "header_data", ",", "16", ")", "file_info", "[", "\"pixels_per_meter\"", "]", "=", "(", "i32", "(", "header_data", ",", "20", ")", ",", "i32", "(", "header_data", ",", "24", ")", ",", ")", "file_info", "[", "\"colors\"", "]", "=", "i32", "(", "header_data", ",", "28", ")", "file_info", "[", "\"palette_padding\"", "]", "=", "4", "self", ".", "info", "[", "\"dpi\"", "]", "=", "tuple", "(", "x", "/", "39.3701", "for", "x", "in", "file_info", "[", "\"pixels_per_meter\"", "]", ")", "if", "file_info", "[", "\"compression\"", "]", "==", "self", ".", "BITFIELDS", ":", "if", "len", "(", "header_data", ")", ">=", "52", ":", "for", "idx", ",", "mask", "in", "enumerate", "(", "[", "\"r_mask\"", ",", "\"g_mask\"", ",", "\"b_mask\"", ",", "\"a_mask\"", "]", ")", ":", "file_info", "[", "mask", "]", "=", "i32", "(", "header_data", ",", "36", "+", "idx", "*", "4", ")", "else", ":", "# 40 byte headers only have the three components in the", "# bitfields masks, ref:", "# https://msdn.microsoft.com/en-us/library/windows/desktop/dd183376(v=vs.85).aspx", "# See also", "# 
https://github.com/python-pillow/Pillow/issues/1293", "# There is a 4th component in the RGBQuad, in the alpha", "# location, but it is listed as a reserved component,", "# and it is not generally an alpha channel", "file_info", "[", "\"a_mask\"", "]", "=", "0x0", "for", "mask", "in", "[", "\"r_mask\"", ",", "\"g_mask\"", ",", "\"b_mask\"", "]", ":", "file_info", "[", "mask", "]", "=", "i32", "(", "read", "(", "4", ")", ")", "file_info", "[", "\"rgb_mask\"", "]", "=", "(", "file_info", "[", "\"r_mask\"", "]", ",", "file_info", "[", "\"g_mask\"", "]", ",", "file_info", "[", "\"b_mask\"", "]", ",", ")", "file_info", "[", "\"rgba_mask\"", "]", "=", "(", "file_info", "[", "\"r_mask\"", "]", ",", "file_info", "[", "\"g_mask\"", "]", ",", "file_info", "[", "\"b_mask\"", "]", ",", "file_info", "[", "\"a_mask\"", "]", ",", ")", "else", ":", "raise", "OSError", "(", "f\"Unsupported BMP header type ({file_info['header_size']})\"", ")", "# ------------------ Special case : header is reported 40, which", "# ---------------------- is shorter than real size for bpp >= 16", "self", ".", "_size", "=", "file_info", "[", "\"width\"", "]", ",", "file_info", "[", "\"height\"", "]", "# ------- If color count was not found in the header, compute from bits", "file_info", "[", "\"colors\"", "]", "=", "(", "file_info", "[", "\"colors\"", "]", "if", "file_info", ".", "get", "(", "\"colors\"", ",", "0", ")", "else", "(", "1", "<<", "file_info", "[", "\"bits\"", "]", ")", ")", "# ---------------------- Check bit depth for unusual unsupported values", "self", ".", "mode", ",", "raw_mode", "=", "BIT2MODE", ".", "get", "(", "file_info", "[", "\"bits\"", "]", ",", "(", "None", ",", "None", ")", ")", "if", "self", ".", "mode", "is", "None", ":", "raise", "OSError", "(", "f\"Unsupported BMP pixel depth ({file_info['bits']})\"", ")", "# ---------------- Process BMP with Bitfields compression (not palette)", "if", "file_info", "[", "\"compression\"", "]", "==", "self", ".", "BITFIELDS", ":", "SUPPORTED", "=", "{", "32", ":", "[", "(", "0xFF0000", ",", "0xFF00", ",", "0xFF", ",", "0x0", ")", ",", "(", "0xFF0000", ",", "0xFF00", ",", "0xFF", ",", "0xFF000000", ")", ",", "(", "0xFF", ",", "0xFF00", ",", "0xFF0000", ",", "0xFF000000", ")", ",", "(", "0x0", ",", "0x0", ",", "0x0", ",", "0x0", ")", ",", "(", "0xFF000000", ",", "0xFF0000", ",", "0xFF00", ",", "0x0", ")", ",", "]", ",", "24", ":", "[", "(", "0xFF0000", ",", "0xFF00", ",", "0xFF", ")", "]", ",", "16", ":", "[", "(", "0xF800", ",", "0x7E0", ",", "0x1F", ")", ",", "(", "0x7C00", ",", "0x3E0", ",", "0x1F", ")", "]", ",", "}", "MASK_MODES", "=", "{", "(", "32", ",", "(", "0xFF0000", ",", "0xFF00", ",", "0xFF", ",", "0x0", ")", ")", ":", "\"BGRX\"", ",", "(", "32", ",", "(", "0xFF000000", ",", "0xFF0000", ",", "0xFF00", ",", "0x0", ")", ")", ":", "\"XBGR\"", ",", "(", "32", ",", "(", "0xFF", ",", "0xFF00", ",", "0xFF0000", ",", "0xFF000000", ")", ")", ":", "\"RGBA\"", ",", "(", "32", ",", "(", "0xFF0000", ",", "0xFF00", ",", "0xFF", ",", "0xFF000000", ")", ")", ":", "\"BGRA\"", ",", "(", "32", ",", "(", "0x0", ",", "0x0", ",", "0x0", ",", "0x0", ")", ")", ":", "\"BGRA\"", ",", "(", "24", ",", "(", "0xFF0000", ",", "0xFF00", ",", "0xFF", ")", ")", ":", "\"BGR\"", ",", "(", "16", ",", "(", "0xF800", ",", "0x7E0", ",", "0x1F", ")", ")", ":", "\"BGR;16\"", ",", "(", "16", ",", "(", "0x7C00", ",", "0x3E0", ",", "0x1F", ")", ")", ":", "\"BGR;15\"", ",", "}", "if", "file_info", "[", "\"bits\"", "]", "in", "SUPPORTED", ":", "if", "(", "file_info", "[", "\"bits\"", "]", "==", 
"32", "and", "file_info", "[", "\"rgba_mask\"", "]", "in", "SUPPORTED", "[", "file_info", "[", "\"bits\"", "]", "]", ")", ":", "raw_mode", "=", "MASK_MODES", "[", "(", "file_info", "[", "\"bits\"", "]", ",", "file_info", "[", "\"rgba_mask\"", "]", ")", "]", "self", ".", "mode", "=", "\"RGBA\"", "if", "\"A\"", "in", "raw_mode", "else", "self", ".", "mode", "elif", "(", "file_info", "[", "\"bits\"", "]", "in", "(", "24", ",", "16", ")", "and", "file_info", "[", "\"rgb_mask\"", "]", "in", "SUPPORTED", "[", "file_info", "[", "\"bits\"", "]", "]", ")", ":", "raw_mode", "=", "MASK_MODES", "[", "(", "file_info", "[", "\"bits\"", "]", ",", "file_info", "[", "\"rgb_mask\"", "]", ")", "]", "else", ":", "raise", "OSError", "(", "\"Unsupported BMP bitfields layout\"", ")", "else", ":", "raise", "OSError", "(", "\"Unsupported BMP bitfields layout\"", ")", "elif", "file_info", "[", "\"compression\"", "]", "==", "self", ".", "RAW", ":", "if", "file_info", "[", "\"bits\"", "]", "==", "32", "and", "header", "==", "22", ":", "# 32-bit .cur offset", "raw_mode", ",", "self", ".", "mode", "=", "\"BGRA\"", ",", "\"RGBA\"", "else", ":", "raise", "OSError", "(", "f\"Unsupported BMP compression ({file_info['compression']})\"", ")", "# --------------- Once the header is processed, process the palette/LUT", "if", "self", ".", "mode", "==", "\"P\"", ":", "# Paletted for 1, 4 and 8 bit images", "# ---------------------------------------------------- 1-bit images", "if", "not", "(", "0", "<", "file_info", "[", "\"colors\"", "]", "<=", "65536", ")", ":", "raise", "OSError", "(", "f\"Unsupported BMP Palette size ({file_info['colors']})\"", ")", "else", ":", "padding", "=", "file_info", "[", "\"palette_padding\"", "]", "palette", "=", "read", "(", "padding", "*", "file_info", "[", "\"colors\"", "]", ")", "greyscale", "=", "True", "indices", "=", "(", "(", "0", ",", "255", ")", "if", "file_info", "[", "\"colors\"", "]", "==", "2", "else", "list", "(", "range", "(", "file_info", "[", "\"colors\"", "]", ")", ")", ")", "# ----------------- Check if greyscale and ignore palette if so", "for", "ind", ",", "val", "in", "enumerate", "(", "indices", ")", ":", "rgb", "=", "palette", "[", "ind", "*", "padding", ":", "ind", "*", "padding", "+", "3", "]", "if", "rgb", "!=", "o8", "(", "val", ")", "*", "3", ":", "greyscale", "=", "False", "# ------- If all colors are grey, white or black, ditch palette", "if", "greyscale", ":", "self", ".", "mode", "=", "\"1\"", "if", "file_info", "[", "\"colors\"", "]", "==", "2", "else", "\"L\"", "raw_mode", "=", "self", ".", "mode", "else", ":", "self", ".", "mode", "=", "\"P\"", "self", ".", "palette", "=", "ImagePalette", ".", "raw", "(", "\"BGRX\"", "if", "padding", "==", "4", "else", "\"BGR\"", ",", "palette", ")", "# ---------------------------- Finally set the tile data for the plugin", "self", ".", "info", "[", "\"compression\"", "]", "=", "file_info", "[", "\"compression\"", "]", "self", ".", "tile", "=", "[", "(", "\"raw\"", ",", "(", "0", ",", "0", ",", "file_info", "[", "\"width\"", "]", ",", "file_info", "[", "\"height\"", "]", ")", ",", "offset", "or", "self", ".", "fp", ".", "tell", "(", ")", ",", "(", "raw_mode", ",", "(", "(", "file_info", "[", "\"width\"", "]", "*", "file_info", "[", "\"bits\"", "]", "+", "31", ")", ">>", "3", ")", "&", "(", "~", "3", ")", ",", "file_info", "[", "\"direction\"", "]", ",", ")", ",", ")", "]" ]
[ 71, 4 ]
[ 256, 9 ]
python
en
['en', 'en', 'en']
True
BmpImageFile._open
(self)
Open file, check magic number and read header
Open file, check magic number and read header
def _open(self):
    """ Open file, check magic number and read header """
    # read 14 bytes: magic number, filesize, reserved, header final offset
    head_data = self.fp.read(14)
    # choke if the file does not have the required magic bytes
    if not _accept(head_data):
        raise SyntaxError("Not a BMP file")
    # read the start position of the BMP image data (u32)
    offset = i32(head_data, 10)
    # load bitmap information (offset=raster info)
    self._bitmap(offset=offset)
[ "def", "_open", "(", "self", ")", ":", "# read 14 bytes: magic number, filesize, reserved, header final offset", "head_data", "=", "self", ".", "fp", ".", "read", "(", "14", ")", "# choke if the file does not have the required magic bytes", "if", "not", "_accept", "(", "head_data", ")", ":", "raise", "SyntaxError", "(", "\"Not a BMP file\"", ")", "# read the start position of the BMP image data (u32)", "offset", "=", "i32", "(", "head_data", ",", "10", ")", "# load bitmap information (offset=raster info)", "self", ".", "_bitmap", "(", "offset", "=", "offset", ")" ]
[ 258, 4 ]
[ 268, 35 ]
python
en
['en', 'en', 'en']
True
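The _open record reads the 14-byte BITMAPFILEHEADER, validates the 'BM' magic via _accept, and takes the pixel-data offset from bytes 10-13 as a little-endian u32. The same parse can be reproduced with the standard struct module; the file name below is a hypothetical placeholder:

import struct

def read_bmp_offset(path):
    """Return a BMP file's pixel-data offset, mirroring _open()."""
    with open(path, "rb") as f:
        head = f.read(14)
    if head[:2] != b"BM":  # the magic bytes _accept() checks for
        raise SyntaxError("Not a BMP file")
    # <IHHI: little-endian file size, two reserved u16 fields, data offset
    _size, _r1, _r2, offset = struct.unpack("<IHHI", head[2:])
    return offset

# offset = read_bmp_offset("example.bmp")  # hypothetical input file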
format_for_columns
( pkgs: "_ProcessedDists", options: Values )
Convert the package data into something usable by output_package_listing_columns.
Convert the package data into something usable by output_package_listing_columns.
def format_for_columns( pkgs: "_ProcessedDists", options: Values ) -> Tuple[List[List[str]], List[str]]: """ Convert the package data into something usable by output_package_listing_columns. """ running_outdated = options.outdated # Adjust the header for the `pip list --outdated` case. if running_outdated: header = ["Package", "Version", "Latest", "Type"] else: header = ["Package", "Version"] data = [] if options.verbose >= 1 or any(x.editable for x in pkgs): header.append("Location") if options.verbose >= 1: header.append("Installer") for proj in pkgs: # if we're working on the 'outdated' list, separate out the # latest_version and type row = [proj.raw_name, str(proj.version)] if running_outdated: row.append(str(proj.latest_version)) row.append(proj.latest_filetype) if options.verbose >= 1 or proj.editable: row.append(proj.location or "") if options.verbose >= 1: row.append(proj.installer) data.append(row) return data, header
[ "def", "format_for_columns", "(", "pkgs", ":", "\"_ProcessedDists\"", ",", "options", ":", "Values", ")", "->", "Tuple", "[", "List", "[", "List", "[", "str", "]", "]", ",", "List", "[", "str", "]", "]", ":", "running_outdated", "=", "options", ".", "outdated", "# Adjust the header for the `pip list --outdated` case.", "if", "running_outdated", ":", "header", "=", "[", "\"Package\"", ",", "\"Version\"", ",", "\"Latest\"", ",", "\"Type\"", "]", "else", ":", "header", "=", "[", "\"Package\"", ",", "\"Version\"", "]", "data", "=", "[", "]", "if", "options", ".", "verbose", ">=", "1", "or", "any", "(", "x", ".", "editable", "for", "x", "in", "pkgs", ")", ":", "header", ".", "append", "(", "\"Location\"", ")", "if", "options", ".", "verbose", ">=", "1", ":", "header", ".", "append", "(", "\"Installer\"", ")", "for", "proj", "in", "pkgs", ":", "# if we're working on the 'outdated' list, separate out the", "# latest_version and type", "row", "=", "[", "proj", ".", "raw_name", ",", "str", "(", "proj", ".", "version", ")", "]", "if", "running_outdated", ":", "row", ".", "append", "(", "str", "(", "proj", ".", "latest_version", ")", ")", "row", ".", "append", "(", "proj", ".", "latest_filetype", ")", "if", "options", ".", "verbose", ">=", "1", "or", "proj", ".", "editable", ":", "row", ".", "append", "(", "proj", ".", "location", "or", "\"\"", ")", "if", "options", ".", "verbose", ">=", "1", ":", "row", ".", "append", "(", "proj", ".", "installer", ")", "data", ".", "append", "(", "row", ")", "return", "data", ",", "header" ]
[ 283, 0 ]
[ 319, 23 ]
python
en
['en', 'error', 'th']
False
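format_for_columns only touches a handful of attributes on each distribution (raw_name, version, editable, location, installer, plus latest_version and latest_filetype under --outdated). A self-contained sketch of the same row/header shaping, using stand-in objects instead of pip's internal _ProcessedDists:

from optparse import Values
from types import SimpleNamespace

pkgs = [SimpleNamespace(raw_name="requests", version="2.28.1", editable=False,
                        location="/site-packages", installer="pip")]
options = Values({"outdated": False, "verbose": 1})

header = ["Package", "Version"]
if options.verbose >= 1 or any(p.editable for p in pkgs):
    header.append("Location")
if options.verbose >= 1:
    header.append("Installer")
data = [[p.raw_name, str(p.version), p.location or "", p.installer] for p in pkgs]
print(header)  # ['Package', 'Version', 'Location', 'Installer']
print(data)    # [['requests', '2.28.1', '/site-packages', 'pip']]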
ListCommand._build_package_finder
( self, options: Values, session: PipSession )
Create a package finder appropriate to this list command.
Create a package finder appropriate to this list command.
def _build_package_finder( self, options: Values, session: PipSession ) -> PackageFinder: """ Create a package finder appropriate to this list command. """ link_collector = LinkCollector.create(session, options=options) # Pass allow_yanked=False to ignore yanked versions. selection_prefs = SelectionPreferences( allow_yanked=False, allow_all_prereleases=options.pre, ) return PackageFinder.create( link_collector=link_collector, selection_prefs=selection_prefs, )
[ "def", "_build_package_finder", "(", "self", ",", "options", ":", "Values", ",", "session", ":", "PipSession", ")", "->", "PackageFinder", ":", "link_collector", "=", "LinkCollector", ".", "create", "(", "session", ",", "options", "=", "options", ")", "# Pass allow_yanked=False to ignore yanked versions.", "selection_prefs", "=", "SelectionPreferences", "(", "allow_yanked", "=", "False", ",", "allow_all_prereleases", "=", "options", ".", "pre", ",", ")", "return", "PackageFinder", ".", "create", "(", "link_collector", "=", "link_collector", ",", "selection_prefs", "=", "selection_prefs", ",", ")" ]
[ 125, 4 ]
[ 142, 9 ]
python
en
['en', 'error', 'th']
False
parse_tag
(tag: str)
Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances. Returning a set is required due to the possibility that the tag is a compressed tag set.
Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances.
def parse_tag(tag: str) -> FrozenSet[Tag]: """ Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances. Returning a set is required due to the possibility that the tag is a compressed tag set. """ tags = set() interpreters, abis, platforms = tag.split("-") for interpreter in interpreters.split("."): for abi in abis.split("."): for platform_ in platforms.split("."): tags.add(Tag(interpreter, abi, platform_)) return frozenset(tags)
[ "def", "parse_tag", "(", "tag", ":", "str", ")", "->", "FrozenSet", "[", "Tag", "]", ":", "tags", "=", "set", "(", ")", "interpreters", ",", "abis", ",", "platforms", "=", "tag", ".", "split", "(", "\"-\"", ")", "for", "interpreter", "in", "interpreters", ".", "split", "(", "\".\"", ")", ":", "for", "abi", "in", "abis", ".", "split", "(", "\".\"", ")", ":", "for", "platform_", "in", "platforms", ".", "split", "(", "\".\"", ")", ":", "tags", ".", "add", "(", "Tag", "(", "interpreter", ",", "abi", ",", "platform_", ")", ")", "return", "frozenset", "(", "tags", ")" ]
[ 95, 0 ]
[ 108, 26 ]
python
en
['en', 'error', 'th']
False
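Because each dash-separated component of a tag may itself be a '.'-compressed set, one string can expand into a cross-product of Tag instances. Assuming the packaging library is installed, the expansion is easy to see:

from packaging.tags import parse_tag

print(sorted(str(t) for t in parse_tag("py2.py3-none-any")))
# ['py2-none-any', 'py3-none-any']

# Two interpreters x one ABI x two platforms = four tags.
print(len(parse_tag("cp39.cp310-abi3-manylinux1_x86_64.win_amd64")))  # 4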
_abi3_applies
(python_version: PythonVersion)
Determine if the Python version supports abi3. PEP 384 was first implemented in Python 3.2.
Determine if the Python version supports abi3.
def _abi3_applies(python_version: PythonVersion) -> bool: """ Determine if the Python version supports abi3. PEP 384 was first implemented in Python 3.2. """ return len(python_version) > 1 and tuple(python_version) >= (3, 2)
[ "def", "_abi3_applies", "(", "python_version", ":", "PythonVersion", ")", "->", "bool", ":", "return", "len", "(", "python_version", ")", ">", "1", "and", "tuple", "(", "python_version", ")", ">=", "(", "3", ",", "2", ")" ]
[ 124, 0 ]
[ 130, 70 ]
python
en
['en', 'error', 'th']
False
cpython_tags
( python_version: Optional[PythonVersion] = None, abis: Optional[Iterable[str]] = None, platforms: Optional[Iterable[str]] = None, *, warn: bool = False, )
Yields the tags for a CPython interpreter. The tags consist of: - cp<python_version>-<abi>-<platform> - cp<python_version>-abi3-<platform> - cp<python_version>-none-<platform> - cp<less than python_version>-abi3-<platform> # Older Python versions down to 3.2. If python_version only specifies a major version then user-provided ABIs and the 'none' ABI tag will be used. If 'abi3' or 'none' are specified in 'abis' then they will be yielded at their normal position and not at the beginning.
Yields the tags for a CPython interpreter.
def cpython_tags( python_version: Optional[PythonVersion] = None, abis: Optional[Iterable[str]] = None, platforms: Optional[Iterable[str]] = None, *, warn: bool = False, ) -> Iterator[Tag]: """ Yields the tags for a CPython interpreter. The tags consist of: - cp<python_version>-<abi>-<platform> - cp<python_version>-abi3-<platform> - cp<python_version>-none-<platform> - cp<less than python_version>-abi3-<platform> # Older Python versions down to 3.2. If python_version only specifies a major version then user-provided ABIs and the 'none' ABItag will be used. If 'abi3' or 'none' are specified in 'abis' then they will be yielded at their normal position and not at the beginning. """ if not python_version: python_version = sys.version_info[:2] interpreter = "cp{}".format(_version_nodot(python_version[:2])) if abis is None: if len(python_version) > 1: abis = _cpython_abis(python_version, warn) else: abis = [] abis = list(abis) # 'abi3' and 'none' are explicitly handled later. for explicit_abi in ("abi3", "none"): try: abis.remove(explicit_abi) except ValueError: pass platforms = list(platforms or _platform_tags()) for abi in abis: for platform_ in platforms: yield Tag(interpreter, abi, platform_) if _abi3_applies(python_version): yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms) yield from (Tag(interpreter, "none", platform_) for platform_ in platforms) if _abi3_applies(python_version): for minor_version in range(python_version[1] - 1, 1, -1): for platform_ in platforms: interpreter = "cp{version}".format( version=_version_nodot((python_version[0], minor_version)) ) yield Tag(interpreter, "abi3", platform_)
[ "def", "cpython_tags", "(", "python_version", ":", "Optional", "[", "PythonVersion", "]", "=", "None", ",", "abis", ":", "Optional", "[", "Iterable", "[", "str", "]", "]", "=", "None", ",", "platforms", ":", "Optional", "[", "Iterable", "[", "str", "]", "]", "=", "None", ",", "*", ",", "warn", ":", "bool", "=", "False", ",", ")", "->", "Iterator", "[", "Tag", "]", ":", "if", "not", "python_version", ":", "python_version", "=", "sys", ".", "version_info", "[", ":", "2", "]", "interpreter", "=", "\"cp{}\"", ".", "format", "(", "_version_nodot", "(", "python_version", "[", ":", "2", "]", ")", ")", "if", "abis", "is", "None", ":", "if", "len", "(", "python_version", ")", ">", "1", ":", "abis", "=", "_cpython_abis", "(", "python_version", ",", "warn", ")", "else", ":", "abis", "=", "[", "]", "abis", "=", "list", "(", "abis", ")", "# 'abi3' and 'none' are explicitly handled later.", "for", "explicit_abi", "in", "(", "\"abi3\"", ",", "\"none\"", ")", ":", "try", ":", "abis", ".", "remove", "(", "explicit_abi", ")", "except", "ValueError", ":", "pass", "platforms", "=", "list", "(", "platforms", "or", "_platform_tags", "(", ")", ")", "for", "abi", "in", "abis", ":", "for", "platform_", "in", "platforms", ":", "yield", "Tag", "(", "interpreter", ",", "abi", ",", "platform_", ")", "if", "_abi3_applies", "(", "python_version", ")", ":", "yield", "from", "(", "Tag", "(", "interpreter", ",", "\"abi3\"", ",", "platform_", ")", "for", "platform_", "in", "platforms", ")", "yield", "from", "(", "Tag", "(", "interpreter", ",", "\"none\"", ",", "platform_", ")", "for", "platform_", "in", "platforms", ")", "if", "_abi3_applies", "(", "python_version", ")", ":", "for", "minor_version", "in", "range", "(", "python_version", "[", "1", "]", "-", "1", ",", "1", ",", "-", "1", ")", ":", "for", "platform_", "in", "platforms", ":", "interpreter", "=", "\"cp{version}\"", ".", "format", "(", "version", "=", "_version_nodot", "(", "(", "python_version", "[", "0", "]", ",", "minor_version", ")", ")", ")", "yield", "Tag", "(", "interpreter", ",", "\"abi3\"", ",", "platform_", ")" ]
[ 169, 0 ]
[ 223, 57 ]
python
en
['en', 'error', 'th']
False
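Pinning python_version, abis, and platforms makes cpython_tags' priority order concrete: the version-specific ABI first, then abi3, then none, then abi3 tags for older minor versions. Illustrative output, assuming packaging is installed:

from packaging.tags import cpython_tags

tags = list(cpython_tags((3, 10), abis=["cp310"],
                         platforms=["manylinux2014_x86_64"]))
for t in tags[:4]:
    print(t)
# cp310-cp310-manylinux2014_x86_64
# cp310-abi3-manylinux2014_x86_64
# cp310-none-manylinux2014_x86_64
# cp39-abi3-manylinux2014_x86_64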
generic_tags
( interpreter: Optional[str] = None, abis: Optional[Iterable[str]] = None, platforms: Optional[Iterable[str]] = None, *, warn: bool = False, )
Yields the tags for a generic interpreter. The tags consist of: - <interpreter>-<abi>-<platform> The "none" ABI will be added if it was not explicitly provided.
Yields the tags for a generic interpreter.
def generic_tags( interpreter: Optional[str] = None, abis: Optional[Iterable[str]] = None, platforms: Optional[Iterable[str]] = None, *, warn: bool = False, ) -> Iterator[Tag]: """ Yields the tags for a generic interpreter. The tags consist of: - <interpreter>-<abi>-<platform> The "none" ABI will be added if it was not explicitly provided. """ if not interpreter: interp_name = interpreter_name() interp_version = interpreter_version(warn=warn) interpreter = "".join([interp_name, interp_version]) if abis is None: abis = _generic_abi() platforms = list(platforms or _platform_tags()) abis = list(abis) if "none" not in abis: abis.append("none") for abi in abis: for platform_ in platforms: yield Tag(interpreter, abi, platform_)
[ "def", "generic_tags", "(", "interpreter", ":", "Optional", "[", "str", "]", "=", "None", ",", "abis", ":", "Optional", "[", "Iterable", "[", "str", "]", "]", "=", "None", ",", "platforms", ":", "Optional", "[", "Iterable", "[", "str", "]", "]", "=", "None", ",", "*", ",", "warn", ":", "bool", "=", "False", ",", ")", "->", "Iterator", "[", "Tag", "]", ":", "if", "not", "interpreter", ":", "interp_name", "=", "interpreter_name", "(", ")", "interp_version", "=", "interpreter_version", "(", "warn", "=", "warn", ")", "interpreter", "=", "\"\"", ".", "join", "(", "[", "interp_name", ",", "interp_version", "]", ")", "if", "abis", "is", "None", ":", "abis", "=", "_generic_abi", "(", ")", "platforms", "=", "list", "(", "platforms", "or", "_platform_tags", "(", ")", ")", "abis", "=", "list", "(", "abis", ")", "if", "\"none\"", "not", "in", "abis", ":", "abis", ".", "append", "(", "\"none\"", ")", "for", "abi", "in", "abis", ":", "for", "platform_", "in", "platforms", ":", "yield", "Tag", "(", "interpreter", ",", "abi", ",", "platform_", ")" ]
[ 232, 0 ]
[ 259, 50 ]
python
en
['en', 'error', 'th']
False
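generic_tags is the fallback for non-CPython interpreters; note how 'none' is appended when the caller's ABI list omits it. With every input pinned explicitly, per the record above:

from packaging.tags import generic_tags

tags = list(generic_tags("pp39", abis=["pypy39_pp73"],
                         platforms=["linux_x86_64"]))
print([str(t) for t in tags])
# ['pp39-pypy39_pp73-linux_x86_64', 'pp39-none-linux_x86_64']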
_py_interpreter_range
(py_version: PythonVersion)
Yields Python versions in descending order. After the latest version, the major-only version will be yielded, and then all previous versions of that major version.
Yields Python versions in descending order.
def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]: """ Yields Python versions in descending order. After the latest version, the major-only version will be yielded, and then all previous versions of that major version. """ if len(py_version) > 1: yield "py{version}".format(version=_version_nodot(py_version[:2])) yield "py{major}".format(major=py_version[0]) if len(py_version) > 1: for minor in range(py_version[1] - 1, -1, -1): yield "py{version}".format(version=_version_nodot((py_version[0], minor)))
[ "def", "_py_interpreter_range", "(", "py_version", ":", "PythonVersion", ")", "->", "Iterator", "[", "str", "]", ":", "if", "len", "(", "py_version", ")", ">", "1", ":", "yield", "\"py{version}\"", ".", "format", "(", "version", "=", "_version_nodot", "(", "py_version", "[", ":", "2", "]", ")", ")", "yield", "\"py{major}\"", ".", "format", "(", "major", "=", "py_version", "[", "0", "]", ")", "if", "len", "(", "py_version", ")", ">", "1", ":", "for", "minor", "in", "range", "(", "py_version", "[", "1", "]", "-", "1", ",", "-", "1", ",", "-", "1", ")", ":", "yield", "\"py{version}\"", ".", "format", "(", "version", "=", "_version_nodot", "(", "(", "py_version", "[", "0", "]", ",", "minor", ")", ")", ")" ]
[ 262, 0 ]
[ 274, 86 ]
python
en
['en', 'error', 'th']
False
compatible_tags
( python_version: Optional[PythonVersion] = None, interpreter: Optional[str] = None, platforms: Optional[Iterable[str]] = None, )
Yields the sequence of tags that are compatible with a specific version of Python. The tags consist of: - py*-none-<platform> - <interpreter>-none-any # ... if `interpreter` is provided. - py*-none-any
Yields the sequence of tags that are compatible with a specific version of Python.
def compatible_tags( python_version: Optional[PythonVersion] = None, interpreter: Optional[str] = None, platforms: Optional[Iterable[str]] = None, ) -> Iterator[Tag]: """ Yields the sequence of tags that are compatible with a specific version of Python. The tags consist of: - py*-none-<platform> - <interpreter>-none-any # ... if `interpreter` is provided. - py*-none-any """ if not python_version: python_version = sys.version_info[:2] platforms = list(platforms or _platform_tags()) for version in _py_interpreter_range(python_version): for platform_ in platforms: yield Tag(version, "none", platform_) if interpreter: yield Tag(interpreter, "none", "any") for version in _py_interpreter_range(python_version): yield Tag(version, "none", "any")
[ "def", "compatible_tags", "(", "python_version", ":", "Optional", "[", "PythonVersion", "]", "=", "None", ",", "interpreter", ":", "Optional", "[", "str", "]", "=", "None", ",", "platforms", ":", "Optional", "[", "Iterable", "[", "str", "]", "]", "=", "None", ",", ")", "->", "Iterator", "[", "Tag", "]", ":", "if", "not", "python_version", ":", "python_version", "=", "sys", ".", "version_info", "[", ":", "2", "]", "platforms", "=", "list", "(", "platforms", "or", "_platform_tags", "(", ")", ")", "for", "version", "in", "_py_interpreter_range", "(", "python_version", ")", ":", "for", "platform_", "in", "platforms", ":", "yield", "Tag", "(", "version", ",", "\"none\"", ",", "platform_", ")", "if", "interpreter", ":", "yield", "Tag", "(", "interpreter", ",", "\"none\"", ",", "\"any\"", ")", "for", "version", "in", "_py_interpreter_range", "(", "python_version", ")", ":", "yield", "Tag", "(", "version", ",", "\"none\"", ",", "\"any\"", ")" ]
[ 277, 0 ]
[ 299, 41 ]
python
en
['en', 'error', 'th']
False
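compatible_tags yields the pure-Python fallbacks: platform-specific py* tags first, then the optional <interpreter>-none-any, then the py*-none-any ladder down to py30. Pinned inputs make the sequence deterministic (packaging assumed installed):

from packaging.tags import compatible_tags

tags = [str(t) for t in compatible_tags((3, 9), "cp39", ["manylinux1_x86_64"])]
print(tags[:3])
# ['py39-none-manylinux1_x86_64', 'py3-none-manylinux1_x86_64',
#  'py38-none-manylinux1_x86_64']
print(tags[-2:])
# ['py31-none-any', 'py30-none-any']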
mac_platforms
( version: Optional[MacVersion] = None, arch: Optional[str] = None )
Yields the platform tags for a macOS system. The `version` parameter is a two-item tuple specifying the macOS version to generate platform tags for. The `arch` parameter is the CPU architecture to generate platform tags for. Both parameters default to the appropriate value for the current system.
Yields the platform tags for a macOS system.
def mac_platforms( version: Optional[MacVersion] = None, arch: Optional[str] = None ) -> Iterator[str]: """ Yields the platform tags for a macOS system. The `version` parameter is a two-item tuple specifying the macOS version to generate platform tags for. The `arch` parameter is the CPU architecture to generate platform tags for. Both parameters default to the appropriate value for the current system. """ version_str, _, cpu_arch = platform.mac_ver() if version is None: version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2]))) else: version = version if arch is None: arch = _mac_arch(cpu_arch) else: arch = arch if (10, 0) <= version and version < (11, 0): # Prior to Mac OS 11, each yearly release of Mac OS bumped the # "minor" version number. The major version was always 10. for minor_version in range(version[1], -1, -1): compat_version = 10, minor_version binary_formats = _mac_binary_formats(compat_version, arch) for binary_format in binary_formats: yield "macosx_{major}_{minor}_{binary_format}".format( major=10, minor=minor_version, binary_format=binary_format ) if version >= (11, 0): # Starting with Mac OS 11, each yearly release bumps the major version # number. The minor versions are now the midyear updates. for major_version in range(version[0], 10, -1): compat_version = major_version, 0 binary_formats = _mac_binary_formats(compat_version, arch) for binary_format in binary_formats: yield "macosx_{major}_{minor}_{binary_format}".format( major=major_version, minor=0, binary_format=binary_format ) if version >= (11, 0): # Mac OS 11 on x86_64 is compatible with binaries from previous releases. # Arm64 support was introduced in 11.0, so no Arm binaries from previous # releases exist. # # However, the "universal2" binary format can have a # macOS version earlier than 11.0 when the x86_64 part of the binary supports # that version of macOS. if arch == "x86_64": for minor_version in range(16, 3, -1): compat_version = 10, minor_version binary_formats = _mac_binary_formats(compat_version, arch) for binary_format in binary_formats: yield "macosx_{major}_{minor}_{binary_format}".format( major=compat_version[0], minor=compat_version[1], binary_format=binary_format, ) else: for minor_version in range(16, 3, -1): compat_version = 10, minor_version binary_format = "universal2" yield "macosx_{major}_{minor}_{binary_format}".format( major=compat_version[0], minor=compat_version[1], binary_format=binary_format, )
[ "def", "mac_platforms", "(", "version", ":", "Optional", "[", "MacVersion", "]", "=", "None", ",", "arch", ":", "Optional", "[", "str", "]", "=", "None", ")", "->", "Iterator", "[", "str", "]", ":", "version_str", ",", "_", ",", "cpu_arch", "=", "platform", ".", "mac_ver", "(", ")", "if", "version", "is", "None", ":", "version", "=", "cast", "(", "\"MacVersion\"", ",", "tuple", "(", "map", "(", "int", ",", "version_str", ".", "split", "(", "\".\"", ")", "[", ":", "2", "]", ")", ")", ")", "else", ":", "version", "=", "version", "if", "arch", "is", "None", ":", "arch", "=", "_mac_arch", "(", "cpu_arch", ")", "else", ":", "arch", "=", "arch", "if", "(", "10", ",", "0", ")", "<=", "version", "and", "version", "<", "(", "11", ",", "0", ")", ":", "# Prior to Mac OS 11, each yearly release of Mac OS bumped the", "# \"minor\" version number. The major version was always 10.", "for", "minor_version", "in", "range", "(", "version", "[", "1", "]", ",", "-", "1", ",", "-", "1", ")", ":", "compat_version", "=", "10", ",", "minor_version", "binary_formats", "=", "_mac_binary_formats", "(", "compat_version", ",", "arch", ")", "for", "binary_format", "in", "binary_formats", ":", "yield", "\"macosx_{major}_{minor}_{binary_format}\"", ".", "format", "(", "major", "=", "10", ",", "minor", "=", "minor_version", ",", "binary_format", "=", "binary_format", ")", "if", "version", ">=", "(", "11", ",", "0", ")", ":", "# Starting with Mac OS 11, each yearly release bumps the major version", "# number. The minor versions are now the midyear updates.", "for", "major_version", "in", "range", "(", "version", "[", "0", "]", ",", "10", ",", "-", "1", ")", ":", "compat_version", "=", "major_version", ",", "0", "binary_formats", "=", "_mac_binary_formats", "(", "compat_version", ",", "arch", ")", "for", "binary_format", "in", "binary_formats", ":", "yield", "\"macosx_{major}_{minor}_{binary_format}\"", ".", "format", "(", "major", "=", "major_version", ",", "minor", "=", "0", ",", "binary_format", "=", "binary_format", ")", "if", "version", ">=", "(", "11", ",", "0", ")", ":", "# Mac OS 11 on x86_64 is compatible with binaries from previous releases.", "# Arm64 support was introduced in 11.0, so no Arm binaries from previous", "# releases exist.", "#", "# However, the \"universal2\" binary format can have a", "# macOS version earlier than 11.0 when the x86_64 part of the binary supports", "# that version of macOS.", "if", "arch", "==", "\"x86_64\"", ":", "for", "minor_version", "in", "range", "(", "16", ",", "3", ",", "-", "1", ")", ":", "compat_version", "=", "10", ",", "minor_version", "binary_formats", "=", "_mac_binary_formats", "(", "compat_version", ",", "arch", ")", "for", "binary_format", "in", "binary_formats", ":", "yield", "\"macosx_{major}_{minor}_{binary_format}\"", ".", "format", "(", "major", "=", "compat_version", "[", "0", "]", ",", "minor", "=", "compat_version", "[", "1", "]", ",", "binary_format", "=", "binary_format", ",", ")", "else", ":", "for", "minor_version", "in", "range", "(", "16", ",", "3", ",", "-", "1", ")", ":", "compat_version", "=", "10", ",", "minor_version", "binary_format", "=", "\"universal2\"", "yield", "\"macosx_{major}_{minor}_{binary_format}\"", ".", "format", "(", "major", "=", "compat_version", "[", "0", "]", ",", "minor", "=", "compat_version", "[", "1", "]", ",", "binary_format", "=", "binary_format", ",", ")" ]
[ 344, 0 ]
[ 413, 17 ]
python
en
['en', 'error', 'th']
False
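Fixing both parameters shows mac_platforms' back-compat fan-out without depending on the machine running the snippet. For an Apple Silicon target, for example (exact output depends on the _mac_binary_formats helper, which is not part of this record, so the comments are illustrative):

from packaging.tags import mac_platforms

tags = list(mac_platforms(version=(11, 0), arch="arm64"))
print(tags[:2])
# e.g. ['macosx_11_0_arm64', 'macosx_11_0_universal2']
print(tags[-1])
# e.g. 'macosx_10_4_universal2'  (universal2 reaches back before 11.0)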
_platform_tags
()
Provides the platform tags for this installation.
Provides the platform tags for this installation.
def _platform_tags() -> Iterator[str]: """ Provides the platform tags for this installation. """ if platform.system() == "Darwin": return mac_platforms() elif platform.system() == "Linux": return _linux_platforms() else: return _generic_platforms()
[ "def", "_platform_tags", "(", ")", "->", "Iterator", "[", "str", "]", ":", "if", "platform", ".", "system", "(", ")", "==", "\"Darwin\"", ":", "return", "mac_platforms", "(", ")", "elif", "platform", ".", "system", "(", ")", "==", "\"Linux\"", ":", "return", "_linux_platforms", "(", ")", "else", ":", "return", "_generic_platforms", "(", ")" ]
[ 433, 0 ]
[ 442, 35 ]
python
en
['en', 'error', 'th']
False
interpreter_name
()
Returns the name of the running interpreter.
Returns the name of the running interpreter.
def interpreter_name() -> str: """ Returns the name of the running interpreter. """ name = sys.implementation.name return INTERPRETER_SHORT_NAMES.get(name) or name
[ "def", "interpreter_name", "(", ")", "->", "str", ":", "name", "=", "sys", ".", "implementation", ".", "name", "return", "INTERPRETER_SHORT_NAMES", ".", "get", "(", "name", ")", "or", "name" ]
[ 445, 0 ]
[ 450, 52 ]
python
en
['en', 'error', 'th']
False
interpreter_version
(*, warn: bool = False)
Returns the version of the running interpreter.
Returns the version of the running interpreter.
def interpreter_version(*, warn: bool = False) -> str: """ Returns the version of the running interpreter. """ version = _get_config_var("py_version_nodot", warn=warn) if version: version = str(version) else: version = _version_nodot(sys.version_info[:2]) return version
[ "def", "interpreter_version", "(", "*", ",", "warn", ":", "bool", "=", "False", ")", "->", "str", ":", "version", "=", "_get_config_var", "(", "\"py_version_nodot\"", ",", "warn", "=", "warn", ")", "if", "version", ":", "version", "=", "str", "(", "version", ")", "else", ":", "version", "=", "_version_nodot", "(", "sys", ".", "version_info", "[", ":", "2", "]", ")", "return", "version" ]
[ 453, 0 ]
[ 462, 18 ]
python
en
['en', 'error', 'th']
False
sys_tags
(*, warn: bool = False)
Returns the sequence of tag triples for the running interpreter. The order of the sequence corresponds to priority order for the interpreter, from most to least important.
Returns the sequence of tag triples for the running interpreter.
def sys_tags(*, warn: bool = False) -> Iterator[Tag]: """ Returns the sequence of tag triples for the running interpreter. The order of the sequence corresponds to priority order for the interpreter, from most to least important. """ interp_name = interpreter_name() if interp_name == "cp": yield from cpython_tags(warn=warn) else: yield from generic_tags() yield from compatible_tags()
[ "def", "sys_tags", "(", "*", ",", "warn", ":", "bool", "=", "False", ")", "->", "Iterator", "[", "Tag", "]", ":", "interp_name", "=", "interpreter_name", "(", ")", "if", "interp_name", "==", "\"cp\"", ":", "yield", "from", "cpython_tags", "(", "warn", "=", "warn", ")", "else", ":", "yield", "from", "generic_tags", "(", ")", "yield", "from", "compatible_tags", "(", ")" ]
[ 469, 0 ]
[ 483, 32 ]
python
en
['en', 'error', 'th']
False
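The first tag out of sys_tags is the most specific wheel the running interpreter accepts, which is exactly how installers rank candidate wheels. The values are machine-dependent, so the comments below are only illustrative:

from packaging.tags import sys_tags

tags = list(sys_tags())
print(tags[0])   # most preferred, e.g. cp311-cp311-manylinux_2_17_x86_64
print(tags[-1])  # least preferred, e.g. py30-none-any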
Feature.__init__
(self, feat, layer)
Initialize Feature from a pointer and its Layer object.
Initialize Feature from a pointer and its Layer object.
def __init__(self, feat, layer): """ Initialize Feature from a pointer and its Layer object. """ if not feat: raise GDALException('Cannot create OGR Feature, invalid pointer given.') self.ptr = feat self._layer = layer
[ "def", "__init__", "(", "self", ",", "feat", ",", "layer", ")", ":", "if", "not", "feat", ":", "raise", "GDALException", "(", "'Cannot create OGR Feature, invalid pointer given.'", ")", "self", ".", "ptr", "=", "feat", "self", ".", "_layer", "=", "layer" ]
[ 19, 4 ]
[ 26, 27 ]
python
en
['en', 'error', 'th']
False
Feature.__getitem__
(self, index)
Get the Field object at the specified index, which may be either an integer or the Field's string label. Note that the Field object is not the field's _value_ -- use the `get` method to retrieve the value (e.g. an integer) rather than a Field instance.
Get the Field object at the specified index, which may be either an integer or the Field's string label. Note that the Field object is not the field's _value_ -- use the `get` method to retrieve the value (e.g. an integer) rather than a Field instance.
def __getitem__(self, index): """ Get the Field object at the specified index, which may be either an integer or the Field's string label. Note that the Field object is not the field's _value_ -- use the `get` method instead to retrieve the value (e.g. an integer) instead of a Field instance. """ if isinstance(index, str): i = self.index(index) elif 0 <= index < self.num_fields: i = index else: raise IndexError('Index out of range when accessing field in a feature: %s.' % index) return Field(self, i)
[ "def", "__getitem__", "(", "self", ",", "index", ")", ":", "if", "isinstance", "(", "index", ",", "str", ")", ":", "i", "=", "self", ".", "index", "(", "index", ")", "elif", "0", "<=", "index", "<", "self", ".", "num_fields", ":", "i", "=", "index", "else", ":", "raise", "IndexError", "(", "'Index out of range when accessing field in a feature: %s.'", "%", "index", ")", "return", "Field", "(", "self", ",", "i", ")" ]
[ 28, 4 ]
[ 41, 29 ]
python
en
['en', 'error', 'th']
False
Feature.__len__
(self)
Return the count of fields in this feature.
Return the count of fields in this feature.
def __len__(self): "Return the count of fields in this feature." return self.num_fields
[ "def", "__len__", "(", "self", ")", ":", "return", "self", ".", "num_fields" ]
[ 43, 4 ]
[ 45, 30 ]
python
en
['en', 'en', 'en']
True
Feature.__str__
(self)
The string name of the feature.
The string name of the feature.
def __str__(self): "The string name of the feature." return 'Feature FID %d in Layer<%s>' % (self.fid, self.layer_name)
[ "def", "__str__", "(", "self", ")", ":", "return", "'Feature FID %d in Layer<%s>'", "%", "(", "self", ".", "fid", ",", "self", ".", "layer_name", ")" ]
[ 47, 4 ]
[ 49, 74 ]
python
en
['en', 'en', 'en']
True
Feature.__eq__
(self, other)
Do equivalence testing on the features.
Do equivalence testing on the features.
def __eq__(self, other): "Do equivalence testing on the features." return bool(capi.feature_equal(self.ptr, other._ptr))
[ "def", "__eq__", "(", "self", ",", "other", ")", ":", "return", "bool", "(", "capi", ".", "feature_equal", "(", "self", ".", "ptr", ",", "other", ".", "_ptr", ")", ")" ]
[ 51, 4 ]
[ 53, 61 ]
python
en
['en', 'en', 'en']
True
Feature.fid
(self)
Return the feature identifier.
Return the feature identifier.
def fid(self): "Return the feature identifier." return capi.get_fid(self.ptr)
[ "def", "fid", "(", "self", ")", ":", "return", "capi", ".", "get_fid", "(", "self", ".", "ptr", ")" ]
[ 61, 4 ]
[ 63, 37 ]
python
en
['en', 'fy', 'en']
True
Feature.layer_name
(self)
Return the name of the layer for the feature.
Return the name of the layer for the feature.
def layer_name(self): "Return the name of the layer for the feature." name = capi.get_feat_name(self._layer._ldefn) return force_str(name, self.encoding, strings_only=True)
[ "def", "layer_name", "(", "self", ")", ":", "name", "=", "capi", ".", "get_feat_name", "(", "self", ".", "_layer", ".", "_ldefn", ")", "return", "force_str", "(", "name", ",", "self", ".", "encoding", ",", "strings_only", "=", "True", ")" ]
[ 66, 4 ]
[ 69, 64 ]
python
en
['en', 'en', 'en']
True
Feature.num_fields
(self)
Return the number of fields in the Feature.
Return the number of fields in the Feature.
def num_fields(self): "Return the number of fields in the Feature." return capi.get_feat_field_count(self.ptr)
[ "def", "num_fields", "(", "self", ")", ":", "return", "capi", ".", "get_feat_field_count", "(", "self", ".", "ptr", ")" ]
[ 72, 4 ]
[ 74, 50 ]
python
en
['en', 'en', 'en']
True
Feature.fields
(self)
Return a list of fields in the Feature.
Return a list of fields in the Feature.
def fields(self): "Return a list of fields in the Feature." return [ force_str( capi.get_field_name(capi.get_field_defn(self._layer._ldefn, i)), self.encoding, strings_only=True ) for i in range(self.num_fields) ]
[ "def", "fields", "(", "self", ")", ":", "return", "[", "force_str", "(", "capi", ".", "get_field_name", "(", "capi", ".", "get_field_defn", "(", "self", ".", "_layer", ".", "_ldefn", ",", "i", ")", ")", ",", "self", ".", "encoding", ",", "strings_only", "=", "True", ")", "for", "i", "in", "range", "(", "self", ".", "num_fields", ")", "]" ]
[ 77, 4 ]
[ 85, 9 ]
python
en
['en', 'en', 'en']
True
Feature.geom
(self)
Return the OGR Geometry for this Feature.
Return the OGR Geometry for this Feature.
def geom(self): "Return the OGR Geometry for this Feature." # Retrieving the geometry pointer for the feature. geom_ptr = capi.get_feat_geom_ref(self.ptr) return OGRGeometry(geom_api.clone_geom(geom_ptr))
[ "def", "geom", "(", "self", ")", ":", "# Retrieving the geometry pointer for the feature.", "geom_ptr", "=", "capi", ".", "get_feat_geom_ref", "(", "self", ".", "ptr", ")", "return", "OGRGeometry", "(", "geom_api", ".", "clone_geom", "(", "geom_ptr", ")", ")" ]
[ 88, 4 ]
[ 92, 57 ]
python
en
['en', 'en', 'en']
True
Feature.geom_type
(self)
Return the OGR Geometry Type for this Feature.
Return the OGR Geometry Type for this Feature.
def geom_type(self): "Return the OGR Geometry Type for this Feature." return OGRGeomType(capi.get_fd_geom_type(self._layer._ldefn))
[ "def", "geom_type", "(", "self", ")", ":", "return", "OGRGeomType", "(", "capi", ".", "get_fd_geom_type", "(", "self", ".", "_layer", ".", "_ldefn", ")", ")" ]
[ 95, 4 ]
[ 97, 69 ]
python
en
['en', 'en', 'en']
True
Feature.get
(self, field)
Return the value of the field rather than an instance of the Field object. May take either a string of the field name or a Field object as its parameter.
Return the value of the field rather than an instance of the Field object. May take either a string of the field name or a Field object as its parameter.
def get(self, field): """ Return the value of the field, instead of an instance of the Field object. May take a string of the field name or a Field object as parameters. """ field_name = getattr(field, 'name', field) return self[field_name].value
[ "def", "get", "(", "self", ",", "field", ")", ":", "field_name", "=", "getattr", "(", "field", ",", "'name'", ",", "field", ")", "return", "self", "[", "field_name", "]", ".", "value" ]
[ 100, 4 ]
[ 107, 37 ]
python
en
['en', 'error', 'th']
False
Feature.index
(self, field_name)
Return the index of the given field name.
Return the index of the given field name.
def index(self, field_name): "Return the index of the given field name." i = capi.get_field_index(self.ptr, force_bytes(field_name)) if i < 0: raise IndexError('Invalid OFT field name given: %s.' % field_name) return i
[ "def", "index", "(", "self", ",", "field_name", ")", ":", "i", "=", "capi", ".", "get_field_index", "(", "self", ".", "ptr", ",", "force_bytes", "(", "field_name", ")", ")", "if", "i", "<", "0", ":", "raise", "IndexError", "(", "'Invalid OFT field name given: %s.'", "%", "field_name", ")", "return", "i" ]
[ 109, 4 ]
[ 114, 16 ]
python
en
['en', 'en', 'en']
True
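Taken together, the Feature records above describe GeoDjango's read path for a single OGR feature: index fields by name or position, pull plain values with get(), and clone the geometry on access. A hedged usage sketch; it assumes GDAL is available and uses a hypothetical 'cities.shp' path:

from django.contrib.gis.gdal import DataSource

ds = DataSource("cities.shp")       # hypothetical shapefile
layer = ds[0]
for feat in layer:
    print(feat.fid, feat.layer_name, feat.fields)
    name = feat.get("NAME")         # the value itself, not a Field
    field = feat["NAME"]            # a Field instance via __getitem__
    print(name, feat.geom.geom_type)
    break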
View.dispatch_request
(self)
Subclasses have to override this method to implement the actual view function code. This method is called with all the arguments from the URL rule.
Subclasses have to override this method to implement the actual view function code. This method is called with all the arguments from the URL rule.
def dispatch_request(self): """Subclasses have to override this method to implement the actual view function code. This method is called with all the arguments from the URL rule. """ raise NotImplementedError()
[ "def", "dispatch_request", "(", "self", ")", ":", "raise", "NotImplementedError", "(", ")" ]
[ 64, 4 ]
[ 69, 35 ]
python
en
['en', 'en', 'en']
True
View.as_view
(cls, name, *class_args, **class_kwargs)
Converts the class into an actual view function that can be used with the routing system. Internally this generates a function on the fly which will instantiate the :class:`View` on each request and call the :meth:`dispatch_request` method on it. The arguments passed to :meth:`as_view` are forwarded to the constructor of the class.
Converts the class into an actual view function that can be used with the routing system. Internally this generates a function on the fly which will instantiate the :class:`View` on each request and call the :meth:`dispatch_request` method on it.
def as_view(cls, name, *class_args, **class_kwargs): """Converts the class into an actual view function that can be used with the routing system. Internally this generates a function on the fly which will instantiate the :class:`View` on each request and call the :meth:`dispatch_request` method on it. The arguments passed to :meth:`as_view` are forwarded to the constructor of the class. """ def view(*args, **kwargs): self = view.view_class(*class_args, **class_kwargs) return self.dispatch_request(*args, **kwargs) if cls.decorators: view.__name__ = name view.__module__ = cls.__module__ for decorator in cls.decorators: view = decorator(view) # We attach the view class to the view function for two reasons: # first of all it allows us to easily figure out what class-based # view this thing came from, secondly it's also used for instantiating # the view class so you can actually replace it with something else # for testing purposes and debugging. view.view_class = cls view.__name__ = name view.__doc__ = cls.__doc__ view.__module__ = cls.__module__ view.methods = cls.methods return view
[ "def", "as_view", "(", "cls", ",", "name", ",", "*", "class_args", ",", "*", "*", "class_kwargs", ")", ":", "def", "view", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "self", "=", "view", ".", "view_class", "(", "*", "class_args", ",", "*", "*", "class_kwargs", ")", "return", "self", ".", "dispatch_request", "(", "*", "args", ",", "*", "*", "kwargs", ")", "if", "cls", ".", "decorators", ":", "view", ".", "__name__", "=", "name", "view", ".", "__module__", "=", "cls", ".", "__module__", "for", "decorator", "in", "cls", ".", "decorators", ":", "view", "=", "decorator", "(", "view", ")", "# We attach the view class to the view function for two reasons:", "# first of all it allows us to easily figure out what class-based", "# view this thing came from, secondly it's also used for instantiating", "# the view class so you can actually replace it with something else", "# for testing purposes and debugging.", "view", ".", "view_class", "=", "cls", "view", ".", "__name__", "=", "name", "view", ".", "__doc__", "=", "cls", ".", "__doc__", "view", ".", "__module__", "=", "cls", ".", "__module__", "view", ".", "methods", "=", "cls", ".", "methods", "return", "view" ]
[ 72, 4 ]
[ 101, 19 ]
python
en
['en', 'en', 'en']
True
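as_view is the bridge between Flask's class-based views and its function-based routing table. A minimal pluggable view following the pattern this record describes:

from flask import Flask
from flask.views import View

app = Flask(__name__)

class UserList(View):
    def dispatch_request(self):
        # A real view would query storage; static data keeps this runnable.
        return "alice, bob"

# as_view('user_list') manufactures and names the actual view function.
app.add_url_rule("/users/", view_func=UserList.as_view("user_list"))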
jsonp_loader
(url, prefix_regex=r'^(.*\()', suffix_regex=r'(\);)$', sub_d=None, sub_by='')
Request (JSON) data from a server in a different domain (JSONP) and convert it to Python-readable data. 1. url is the url (https) where data is located 2. "prefix_regex" and "suffix_regex" are regex patterns used to remove JSONP specific prefix and suffix, such as callback header: "callback(" and end: ");", 3. "sub_d" is regex patterns for any unwanted string in loaded json data (will be replaced by sub_by). 4. "sub_by" is the string to replace any unwanted string defined by sub_d For function conversion, such as Date.UTC to datetime.datetime, please check JSONPDecoder
Request (JSON) data from a server in a different domain (JSONP) and convert it to Python-readable data. 1. url is the url (https) where data is located 2. "prefix_regex" and "suffix_regex" are regex patterns used to remove JSONP specific prefix and suffix, such as callback header: "callback(" and end: ");", 3. "sub_d" is regex patterns for any unwanted string in loaded json data (will be replaced by sub_by). 4. "sub_by" is the string to replace any unwanted string defined by sub_d For function conversion, such as Date.UTC to datetime.datetime, please check JSONPDecoder
def jsonp_loader(url, prefix_regex=r'^(.*\()', suffix_regex=r'(\);)$', sub_d=None, sub_by=''): """Request (JSON) data from a server in a different domain (JSONP) and convert it to Python-readable data. 1. url is the url (https) where data is located 2. "prefix_regex" and "suffix_regex" are regex patterns used to remove JSONP specific prefix and suffix, such as callback header: "callback(" and end: ");", 3. "sub_d" is regex patterns for any unwanted string in loaded json data (will be replaced by sub_by). 4. "sub_by" is the string to replace any unwanted string defined by sub_d For function conversion, such as Date.UTC to datetime.datetime, please check JSONPDecoder """ hdr = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/535.7 (KHTML, like Gecko) Chrome/16.0.912.77 Safari/535.7'} req = urllib.request.Request(url, headers=hdr) page = urlopen(req) result = page.read().decode('utf-8') # replace all the redundant info with sub_by if sub_d: result = re.sub(sub_d, sub_by, result) prefix = re.search(prefix_regex, result).group() suffix = re.search(suffix_regex, result).group() if result.startswith(prefix) and result.endswith(suffix): result = result[len(prefix):-len(suffix)] return json.loads(result, cls=JSONPDecoder)
[ "def", "jsonp_loader", "(", "url", ",", "prefix_regex", "=", "r'^(.*\\()'", ",", "suffix_regex", "=", "r'(\\);)$'", ",", "sub_d", "=", "None", ",", "sub_by", "=", "''", ")", ":", "hdr", "=", "{", "'User-Agent'", ":", "'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/535.7 (KHTML, like Gecko) Chrome/16.0.912.77 Safari/535.7'", "}", "req", "=", "urllib", ".", "request", ".", "Request", "(", "url", ",", "headers", "=", "hdr", ")", "page", "=", "urlopen", "(", "req", ")", "result", "=", "page", ".", "read", "(", ")", ".", "decode", "(", "'utf-8'", ")", "# replace all the redundant info with sub_by ", "if", "sub_d", ":", "result", "=", "re", ".", "sub", "(", "sub_d", ",", "sub_by", ",", "result", ")", "prefix", "=", "re", ".", "search", "(", "prefix_regex", ",", "result", ")", ".", "group", "(", ")", "suffix", "=", "re", ".", "search", "(", "suffix_regex", ",", "result", ")", ".", "group", "(", ")", "if", "result", ".", "startswith", "(", "prefix", ")", "and", "result", ".", "endswith", "(", "suffix", ")", ":", "result", "=", "result", "[", "len", "(", "prefix", ")", ":", "-", "len", "(", "suffix", ")", "]", "return", "json", ".", "loads", "(", "result", ",", "encoding", "=", "'utf8'", ",", "cls", "=", "JSONPDecoder", ")" ]
[ 12, 0 ]
[ 35, 64 ]
python
en
['en', 'en', 'en']
True
js_map_loader
(url)
Load map data from a .js source. It is designed for use with Highcharts' map collection: https://code.highcharts.com/mapdata/. Map data from other sources is not guaranteed to work.
Load map data from a .js source. It is designed for use with Highcharts' map collection: https://code.highcharts.com/mapdata/. Map data from other sources is not guaranteed to work.
def js_map_loader(url): """Load map data from a .js source. It is designed for using highcharts' map collection: https://code.highcharts.com/mapdata/. Map data from other sources are not guaranteed """ hdr = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/535.7 (KHTML, like Gecko) Chrome/16.0.912.77 Safari/535.7'} req = urllib.request.Request(url, headers=hdr) page = urlopen(req) result = page.read().decode('utf-8') result = result[len(re.search(r'^.* = ', result).group()):] return json.loads(result)
[ "def", "js_map_loader", "(", "url", ")", ":", "hdr", "=", "{", "'User-Agent'", ":", "'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/535.7 (KHTML, like Gecko) Chrome/16.0.912.77 Safari/535.7'", "}", "req", "=", "urllib", ".", "request", ".", "Request", "(", "url", ",", "headers", "=", "hdr", ")", "page", "=", "urlopen", "(", "req", ")", "result", "=", "page", ".", "read", "(", ")", ".", "decode", "(", "'utf-8'", ")", "result", "=", "result", "[", "len", "(", "re", ".", "search", "(", "r'^.* = '", ",", "result", ")", ".", "group", "(", ")", ")", ":", "]", "return", "json", ".", "loads", "(", "result", ")" ]
[ 37, 0 ]
[ 48, 29 ]
python
en
['en', 'en', 'en']
True
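Both loaders fetch remote JavaScript and strip the non-JSON wrapper before parsing; js_map_loader simply drops everything up to the first ' = '. A network-dependent usage sketch against Highcharts' public map collection (the printed values are illustrative assumptions about that file's contents):

map_data = js_map_loader(
    'https://code.highcharts.com/mapdata/countries/us/us-all.js')
print(map_data['title'])          # e.g. 'United States of America'
print(len(map_data['features']))  # one feature per state or territory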
geojson_handler
(geojson, hType='map')
Restructure a GeoJSON object in preparation for being added directly by the add_map_data or add_data_set methods. The geojson will be broken down to fit a specific Highcharts (highmaps) type, either map, mapline or mappoint. Metadata in GeoJSON's properties object will be copied directly over to object['properties'] 1. geojson is the map data (GeoJSON) to be converted 2. hType is the Highmaps series type to target. "map" will return GeoJSON polygons and multipolygons. "mapline" will return GeoJSON linestrings and multilinestrings. "mappoint" will return GeoJSON points and multipoints. default: "map"
Restructure a GeoJSON object in preparation for being added directly by the add_map_data or add_data_set methods. The geojson will be broken down to fit a specific Highcharts (highmaps) type, either map, mapline or mappoint. Metadata in GeoJSON's properties object will be copied directly over to object['properties'] 1. geojson is the map data (GeoJSON) to be converted 2. hType is the Highmaps series type to target. "map" will return GeoJSON polygons and multipolygons. "mapline" will return GeoJSON linestrings and multilinestrings. "mappoint" will return GeoJSON points and multipoints. default: "map"
def geojson_handler(geojson, hType='map'): """Restructure a GeoJSON object in preparation to be added directly by add_map_data or add_data_set methods. The geojson will be broken down to fit a specific Highcharts (highmaps) type, either map, mapline or mappoint. Meta data in GeoJSON's properties object will be copied directly over to object['properties'] 1. geojson is the map data (GeoJSON) to be converted 2. hType is the type of highmap types. "map" will return GeoJSON polygons and multipolygons. "mapline" will return GeoJSON linestrings and multilinestrings. "mappoint" will return GeoJSON points and multipoints. default: "map" """ hType_dict = { 'map': ['polygon', 'multipolygon'], 'mapline': ['linestring', 'multilinestring'], 'mappoint': ['point', 'multipoint'], } oldlist = [x for x in geojson['features'] if x['geometry']['type'].lower() in hType_dict[hType]] newlist = [] for each_dict in oldlist: geojson_type = each_dict['geometry']['type'].lower() if hType == 'mapline': newlist.append( {'name': each_dict['properties'].get('name', None), 'path': _coordinates_to_path(each_dict['geometry']['coordinates'], hType, geojson_type), 'properties': each_dict['properties'], } ) elif hType == 'map': newlist.append( {'name': each_dict['properties']['name'], 'path': _coordinates_to_path(each_dict['geometry']['coordinates'], hType, geojson_type), 'properties': each_dict['properties'], } ) elif hType == 'mappoint': newlist.append( {'name': each_dict['properties']['name'], 'x': each_dict['geometry']['coordinates'][0], 'y': -each_dict['geometry']['coordinates'][1], 'properties': each_dict['properties'], } ) return newlist
[ "def", "geojson_handler", "(", "geojson", ",", "hType", "=", "'map'", ")", ":", "hType_dict", "=", "{", "'map'", ":", "[", "'polygon'", ",", "'multipolygon'", "]", ",", "'mapline'", ":", "[", "'linestring'", ",", "'multilinestring'", "]", ",", "'mappoint'", ":", "[", "'point'", ",", "'multipoint'", "]", ",", "}", "oldlist", "=", "[", "x", "for", "x", "in", "geojson", "[", "'features'", "]", "if", "x", "[", "'geometry'", "]", "[", "'type'", "]", ".", "lower", "(", ")", "in", "hType_dict", "[", "hType", "]", "]", "newlist", "=", "[", "]", "for", "each_dict", "in", "oldlist", ":", "geojson_type", "=", "each_dict", "[", "'geometry'", "]", "[", "'type'", "]", ".", "lower", "(", ")", "if", "hType", "==", "'mapline'", ":", "newlist", ".", "append", "(", "{", "'name'", ":", "each_dict", "[", "'properties'", "]", ".", "get", "(", "'name'", ",", "None", ")", ",", "'path'", ":", "_coordinates_to_path", "(", "each_dict", "[", "'geometry'", "]", "[", "'coordinates'", "]", ",", "hType", ",", "geojson_type", ")", ",", "'properties'", ":", "each_dict", "[", "'properties'", "]", ",", "}", ")", "elif", "hType", "==", "'map'", ":", "newlist", ".", "append", "(", "{", "'name'", ":", "each_dict", "[", "'properties'", "]", "[", "'name'", "]", ",", "'path'", ":", "_coordinates_to_path", "(", "each_dict", "[", "'geometry'", "]", "[", "'coordinates'", "]", ",", "hType", ",", "geojson_type", ")", ",", "'properties'", ":", "each_dict", "[", "'properties'", "]", ",", "}", ")", "elif", "hType", "==", "'mappoint'", ":", "newlist", ".", "append", "(", "{", "'name'", ":", "each_dict", "[", "'properties'", "]", "[", "'name'", "]", ",", "'x'", ":", "each_dict", "[", "'geometry'", "]", "[", "'coordinates'", "]", "[", "0", "]", ",", "'y'", ":", "-", "each_dict", "[", "'geometry'", "]", "[", "'coordinates'", "]", "[", "1", "]", ",", "'properties'", ":", "each_dict", "[", "'properties'", "]", ",", "}", ")", "return", "newlist" ]
[ 50, 0 ]
[ 96, 18 ]
python
en
['en', 'en', 'en']
True
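A self-contained run of geojson_handler on a tiny FeatureCollection shows the restructuring: only geometries matching hType survive, and the mappoint branch negates the y coordinate because Highmaps' y axis points downward:

geojson = {
    'features': [
        {'geometry': {'type': 'Point', 'coordinates': [13.4, 52.5]},
         'properties': {'name': 'Berlin'}},
        {'geometry': {'type': 'Polygon',
                      'coordinates': [[[0, 0], [1, 0], [1, 1]]]},
         'properties': {'name': 'Triangle'}},
    ]
}
print(geojson_handler(geojson, hType='mappoint'))
# [{'name': 'Berlin', 'x': 13.4, 'y': -52.5,
#   'properties': {'name': 'Berlin'}}]   (the Polygon is filtered out)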
JSONPDecoder.decode
(self, json_string)
json_string is basically the string that you give to the json.loads method
json_string is basically the string that you give to the json.loads method
def decode(self, json_string): """ json_string is basically the string that you give to the json.loads method """ default_obj = super(JSONPDecoder, self).decode(json_string) return list(self._iterdecode(default_obj))[0]
[ "def", "decode", "(", "self", ",", "json_string", ")", ":", "default_obj", "=", "super", "(", "JSONPDecoder", ",", "self", ")", ".", "decode", "(", "json_string", ")", "return", "list", "(", "self", ".", "_iterdecode", "(", "default_obj", ")", ")", "[", "0", "]" ]
[ 161, 4 ]
[ 168, 53 ]
python
en
['en', 'error', 'th']
False
JSONPDecoder.is_js_date_utc
(json)
Check if the string contains a Date.UTC function call and return the match group(s) if there are any
Check if the string contains a Date.UTC function call and return the match group(s) if there are any
def is_js_date_utc(json): """Check if the string contains a Date.UTC function call and return the match group(s) if there are any """ JS_date_utc_pattern = r'Date\.UTC\(([0-9]+,[0-9]+,[0-9]+)(,[0-9]+,[0-9]+,[0-9]+)?(,[0-9]+)?\)' re_date = re.compile(JS_date_utc_pattern, re.M) if re_date.search(json): return re_date.search(json).group(0) else: return False
[ "def", "is_js_date_utc", "(", "json", ")", ":", "JS_date_utc_pattern", "=", "r'Date\\.UTC\\(([0-9]+,[0-9]+,[0-9]+)(,[0-9]+,[0-9]+,[0-9]+)?(,[0-9]+)?\\)'", "re_date", "=", "re", ".", "compile", "(", "JS_date_utc_pattern", ",", "re", ".", "M", ")", "if", "re_date", ".", "search", "(", "json", ")", ":", "return", "re_date", ".", "search", "(", "json", ")", ".", "group", "(", "0", ")", "else", ":", "return", "False" ]
[ 201, 4 ]
[ 212, 24 ]
python
en
['en', 'en', 'en']
True
JSONPDecoder.json2datetime
(json)
Convert JSON representation to date or datetime object depending on the argument count. Requires UTC datetime representation. Raises ValueError if the string cannot be parsed.
Convert JSON representation to date or datetime object depending on the argument count. Requires UTC datetime representation. Raises ValueError if the string cannot be parsed.
def json2datetime(json): """Convert JSON representation to date or datetime object depending on the argument count. Requires UTC datetime representation. Raises ValueError if the string cannot be parsed. """ json_m = re.search(r'([0-9]+,[0-9]+,[0-9]+)(,[0-9]+,[0-9]+,[0-9]+)?(,[0-9]+)?', json) args = json_m.group(0).split(',') try: args = list(map(int, args)) except ValueError: raise ValueError('Invalid arguments: %s' % json) if len(args) == 3: return datetime.datetime(args[0], args[1]+1, args[2]) elif len(args) == 6: return datetime.datetime(args[0], args[1]+1, args[2], args[3], args[4], args[5], tzinfo=UTC()) elif len(args) == 7: args[6] *= 1000 return datetime.datetime(args[0], args[1]+1, args[2], args[3], args[4], args[5], args[6], tzinfo=UTC()) raise ValueError('Invalid number of arguments: %s' % json)
[ "def", "json2datetime", "(", "json", ")", ":", "json_m", "=", "re", ".", "search", "(", "r'([0-9]+,[0-9]+,[0-9]+)(,[0-9]+,[0-9]+,[0-9]+)?(,[0-9]+)?'", ",", "json", ")", "args", "=", "json_m", ".", "group", "(", "0", ")", ".", "split", "(", "','", ")", "try", ":", "args", "=", "map", "(", "int", ",", "args", ")", "except", "ValueError", ":", "raise", "ValueError", "(", "'Invalid arguments: %s'", "%", "json", ")", "if", "len", "(", "args", ")", "==", "3", ":", "return", "datetime", ".", "datetime", "(", "args", "[", "0", "]", ",", "args", "[", "1", "]", "+", "1", ",", "args", "[", "2", "]", ")", "elif", "len", "(", "args", ")", "==", "6", ":", "return", "datetime", ".", "datetime", "(", "args", "[", "0", "]", ",", "args", "[", "1", "]", "+", "1", ",", "args", "[", "2", "]", ",", "args", "[", "3", "]", ",", "args", "[", "4", "]", ",", "args", "[", "5", "]", ",", "tzinfo", "=", "UTC", "(", ")", ")", "elif", "len", "(", "args", ")", "==", "7", ":", "args", "[", "6", "]", "*=", "1000", "return", "datetime", ".", "datetime", "(", "args", "[", "0", "]", ",", "args", "[", "1", "]", "+", "1", ",", "args", "[", "2", "]", ",", "args", "[", "3", "]", ",", "args", "[", "4", "]", ",", "args", "[", "5", "]", ",", "args", "[", "6", "]", ",", "tzinfo", "=", "UTC", "(", ")", ")", "raise", "ValueError", "(", "'Invalid number of arguments: %s'", "%", "json", ")" ]
[ 215, 4 ]
[ 238, 64 ]
python
en
['en', 'en', 'en']
True
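JavaScript's Date.UTC takes a zero-based month, which is why json2datetime adds 1 before constructing the datetime, and the seventh argument (milliseconds) is scaled to microseconds. Assuming the method is exposed as a staticmethod, as its self-less signature suggests:

print(JSONPDecoder.json2datetime('Date.UTC(2018,0,15)'))
# 2018-01-15 00:00:00          (JS month 0 becomes January)
print(JSONPDecoder.json2datetime('Date.UTC(2018,11,31,23,59,59)'))
# 2018-12-31 23:59:59+00:00    (six arguments attach UTC tzinfo)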
generate_hours
(startdate: str, enddate: str, starthour: int, endhour: int, is_train: bool)
Generates hours within the specified ranges for training or eval. Call this method twice, once with is_train=True and again with is_train=False Args: starthour (int): Start hour, in the range 0-23 endhour (int): End hour (inclusive), in the range 0-23 startdate (str): Year + Start Julian day, in the range 0-366, e.g. 2018-109 enddate (str): Year + End Julian day (inclusive), in the range 0-366, e.g. 2018-109 is_train (bool): Generate training data or testing data? Yields: dict of {'hour': h, 'day': d, 'year': y}, one for each hour in the range
Generates hours within the specified ranges for training or eval.
def generate_hours(startdate: str, enddate: str, starthour: int, endhour: int, is_train: bool): """Generates hours within the specified ranges for training or eval. Call this method twice, once with is_train=True and again with is_train=False Args: starthour (int): Start hour, in the range 0-23 endhour (int): End hour (inclusive), in the range 0-23 startdate (str): Year + Start Julian day, in the range 0-366, e.g. 2018-109 enddate (str): Year + End Julian day (inclusive), in the range 0-366, e.g. 2018-109 is_train (bool): Generate training data or testing data? Yields: dict of {'hour': h, 'day': d, 'year': y}, one for each hour in the range """ startyear = int(startdate[:4]) endyear = int(enddate[:4]) startday = int(startdate[5:]) endday = int(enddate[5:]) if endyear == startyear: yield from _generate_hours(starthour, endhour, startday, endday, startyear, is_train) else: # for startyear, go from startday to day#365 # FIXME: leap years? yield from _generate_hours(starthour, endhour, startday, 365, startyear, is_train) for y in range(startyear+1, endyear): yield from _generate_hours(starthour, endhour, 1, 365, y, is_train) yield from _generate_hours(starthour, endhour, 1, endday, endyear, is_train)
[ "def", "generate_hours", "(", "startdate", ":", "str", ",", "enddate", ":", "str", ",", "starthour", ":", "int", ",", "endhour", ":", "int", ",", "is_train", ":", "bool", ")", ":", "startyear", "=", "int", "(", "startdate", "[", ":", "4", "]", ")", "endyear", "=", "int", "(", "enddate", "[", ":", "4", "]", ")", "startday", "=", "int", "(", "startdate", "[", "5", ":", "]", ")", "endday", "=", "int", "(", "enddate", "[", "5", ":", "]", ")", "if", "endyear", "==", "startyear", ":", "yield", "from", "_generate_hours", "(", "starthour", ",", "endhour", ",", "startday", ",", "endday", ",", "startyear", ",", "is_train", ")", "else", ":", "# for startyear, go from startday to day#365", "# FIXME: leap years?", "yield", "from", "_generate_hours", "(", "starthour", ",", "endhour", ",", "startday", ",", "365", ",", "startyear", ",", "is_train", ")", "for", "y", "in", "range", "(", "startyear", "+", "1", ",", "endyear", ")", ":", "yield", "from", "_generate_hours", "(", "starthour", ",", "endhour", ",", "1", ",", "365", ",", "endyear", ",", "is_train", ")", "yield", "from", "_generate_hours", "(", "starthour", ",", "endhour", ",", "1", ",", "endday", ",", "endyear", ",", "is_train", ")" ]
[ 41, 0 ]
[ 66, 84 ]
python
en
['en', 'en', 'en']
True
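generate_hours itself only handles the year split; the per-day iteration lives in _generate_hours, which is defined elsewhere in the module and not part of this record. A hypothetical call spanning a year boundary therefore fans out into a 2017 tail and a 2018 head:

# Requires _generate_hours from the same module; shown for shape only.
for h in generate_hours('2017-364', '2018-002',
                        starthour=6, endhour=9, is_train=True):
    print(h)  # dicts such as {'hour': 6, 'day': 364, 'year': 2017}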
_int64_feature
(value)
Wrapper for inserting int64 features into Example proto.
Wrapper for inserting int64 features into Example proto.
def _int64_feature(value): """Wrapper for inserting int64 features into Example proto.""" if not isinstance(value, list): value = [value] return tf.train.Feature(int64_list=tf.train.Int64List(value=value))
[ "def", "_int64_feature", "(", "value", ")", ":", "if", "not", "isinstance", "(", "value", ",", "list", ")", ":", "value", "=", "[", "value", "]", "return", "tf", ".", "train", ".", "Feature", "(", "int64_list", "=", "tf", ".", "train", ".", "Int64List", "(", "value", "=", "value", ")", ")" ]
[ 68, 0 ]
[ 72, 69 ]
python
en
['en', 'en', 'en']
True
_array_feature
(value, min_value, max_value)
Wrapper for inserting ndarray float features into Example proto.
Wrapper for inserting ndarray float features into Example proto.
def _array_feature(value, min_value, max_value): """Wrapper for inserting ndarray float features into Example proto.""" value = np.nan_to_num(value.flatten()) # nan, -inf, +inf to numbers value = np.clip(value, min_value, max_value) # clip to valid return tf.train.Feature(float_list=tf.train.FloatList(value=value))
[ "def", "_array_feature", "(", "value", ",", "min_value", ",", "max_value", ")", ":", "value", "=", "np", ".", "nan_to_num", "(", "value", ".", "flatten", "(", ")", ")", "# nan, -inf, +inf to numbers", "value", "=", "np", ".", "clip", "(", "value", ",", "min_value", ",", "max_value", ")", "# clip to valid", "return", "tf", ".", "train", ".", "Feature", "(", "float_list", "=", "tf", ".", "train", ".", "FloatList", "(", "value", "=", "value", ")", ")" ]
[ 75, 0 ]
[ 79, 69 ]
python
en
['en', 'en', 'en']
True
_bytes_feature
(value)
Wrapper for inserting bytes features into Example proto.
Wrapper for inserting bytes features into Example proto.
def _bytes_feature(value): """Wrapper for inserting bytes features into Example proto.""" return tf.train.Feature(bytes_list=tf.train.BytesList(value=[value]))
[ "def", "_bytes_feature", "(", "value", ")", ":", "return", "tf", ".", "train", ".", "Feature", "(", "bytes_list", "=", "tf", ".", "train", ".", "BytesList", "(", "value", "=", "[", "value", "]", ")", ")" ]
[ 82, 0 ]
[ 84, 71 ]
python
en
['en', 'en', 'en']
True
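The three feature wrappers above are typically used together when assembling a tf.train.Example; a short sketch (assumes TensorFlow is installed; the field names are illustrative, not part of the records):

import numpy as np
import tensorflow as tf

patch = np.random.rand(4, 4).astype(np.float32)
example = tf.train.Example(features=tf.train.Features(feature={
    'label': _int64_feature(1),                # scalar gets wrapped in a list
    'patch': _array_feature(patch, 0.0, 1.0),  # flattened, NaN-cleaned, clipped
    'name': _bytes_feature(b'sample-0'),       # single byte string
}))
serialized = example.SerializeToString()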
create_training_examples
(ref, ltg, ltgfcst, griddef, boxdef, samplingfrac)
Input function that yields dicts of CSV, tfrecord for each box in grid.
Input function that yields dicts of CSV, tfrecord for each box in grid.
def create_training_examples(ref, ltg, ltgfcst, griddef, boxdef, samplingfrac): """Input function that yields dicts of CSV, tfrecord for each box in grid.""" for example in boxdef.rawdata_input_fn(ref, ltg, griddef, ltgfcst): # write out all lightning patches, but only some of the non-lightning ones should_write = (example['has_ltg'] or random.random() < samplingfrac) if should_write: # create a CSV line consisting of extracted features csv_data = [ example['cy'], example['cx'], example['lat'], example['lon'], np.mean(example['ref_smallbox']), # mean within subgrid np.max(example['ref_smallbox']), np.mean(example['ref_bigbox']), np.max(example['ref_bigbox']), np.mean(example['ltg_smallbox']), np.mean(example['ltg_bigbox']), example['has_ltg'] ] csv_line = ','.join([str(v) for v in csv_data]) # create a TF Record with the raw data tfexample = tf.train.Example( features=tf.train.Features( feature={ 'cy': _int64_feature(example['cy']), 'cx': _int64_feature(example['cx']), 'lon': _array_feature(example['lon'], -180, 180), 'lat': _array_feature(example['lat'], -90, 90), 'ref': _array_feature(example['ref_bigbox'], 0, 1), 'ltg': _array_feature(example['ltg_bigbox'], 0, 1), 'has_ltg': _int64_feature(1 if example['has_ltg'] else 0) })) yield { 'csvline': csv_line, 'tfrecord': tfexample.SerializeToString(), 'ref': example['ref_center'], 'ltg' : example['ltg_center'] }
[ "def", "create_training_examples", "(", "ref", ",", "ltg", ",", "ltgfcst", ",", "griddef", ",", "boxdef", ",", "samplingfrac", ")", ":", "for", "example", "in", "boxdef", ".", "rawdata_input_fn", "(", "ref", ",", "ltg", ",", "griddef", ",", "ltgfcst", ")", ":", "# write out all lightning patches, but only some of the non-lightning ones", "should_write", "=", "(", "example", "[", "'has_ltg'", "]", "or", "random", ".", "random", "(", ")", "<", "samplingfrac", ")", "if", "should_write", ":", "# create a CSV line consisting of extracted features", "csv_data", "=", "[", "example", "[", "'cy'", "]", ",", "example", "[", "'cx'", "]", ",", "example", "[", "'lat'", "]", ",", "example", "[", "'lon'", "]", ",", "np", ".", "mean", "(", "example", "[", "'ref_smallbox'", "]", ")", ",", "# mean within subgrid", "np", ".", "max", "(", "example", "[", "'ref_smallbox'", "]", ")", ",", "np", ".", "mean", "(", "example", "[", "'ref_bigbox'", "]", ")", ",", "np", ".", "max", "(", "example", "[", "'ref_bigbox'", "]", ")", ",", "np", ".", "mean", "(", "example", "[", "'ltg_smallbox'", "]", ")", ",", "np", ".", "mean", "(", "example", "[", "'ltg_bigbox'", "]", ")", ",", "example", "[", "'has_ltg'", "]", "]", "csv_line", "=", "','", ".", "join", "(", "[", "str", "(", "v", ")", "for", "v", "in", "csv_data", "]", ")", "# create a TF Record with the raw data", "tfexample", "=", "tf", ".", "train", ".", "Example", "(", "features", "=", "tf", ".", "train", ".", "Features", "(", "feature", "=", "{", "'cy'", ":", "_int64_feature", "(", "example", "[", "'cy'", "]", ")", ",", "'cx'", ":", "_int64_feature", "(", "example", "[", "'cx'", "]", ")", ",", "'lon'", ":", "_array_feature", "(", "example", "[", "'lon'", "]", ",", "-", "180", ",", "180", ")", ",", "'lat'", ":", "_array_feature", "(", "example", "[", "'lat'", "]", ",", "-", "90", ",", "90", ")", ",", "'ref'", ":", "_array_feature", "(", "example", "[", "'ref_bigbox'", "]", ",", "0", ",", "1", ")", ",", "'ltg'", ":", "_array_feature", "(", "example", "[", "'ltg_bigbox'", "]", ",", "0", ",", "1", ")", ",", "'has_ltg'", ":", "_int64_feature", "(", "1", "if", "example", "[", "'has_ltg'", "]", "else", "0", ")", "}", ")", ")", "yield", "{", "'csvline'", ":", "csv_line", ",", "'tfrecord'", ":", "tfexample", ".", "SerializeToString", "(", ")", ",", "'ref'", ":", "example", "[", "'ref_center'", "]", ",", "'ltg'", ":", "example", "[", "'ltg_center'", "]", "}" ]
[ 87, 0 ]
[ 129, 7 ]
python
en
['en', 'en', 'en']
True
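The should_write rule in create_training_examples keeps every positive example and only a random fraction of the negatives, which rebalances a rare-event dataset; as a standalone sketch of the idea (hypothetical helper, not part of the record):

import random

def keep(example, samplingfrac=0.1):
    # Keep all lightning patches; downsample non-lightning ones to ~samplingfrac.
    return example['has_ltg'] or random.random() < samplingfrac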
get_ir_blob_paths
(hours_dict, max_per_hour=None)
Yield the paths of IR blobs for the given hour, optionally capped at max_per_hour.
Yield the paths of IR blobs for the given hour, optionally capped at max_per_hour.
def get_ir_blob_paths(hours_dict, max_per_hour=None): """Yield the paths of IR blobs for the given hour, optionally capped at max_per_hour.""" blob_paths = goesio.get_ir_blob_paths(hours_dict['year'], hours_dict['day'], hours_dict['hour']) if max_per_hour and len(blob_paths) > max_per_hour: blob_paths = blob_paths[:max_per_hour] for blob_path in blob_paths: yield blob_path
[ "def", "get_ir_blob_paths", "(", "hours_dict", ",", "max_per_hour", "=", "None", ")", ":", "blob_paths", "=", "goesio", ".", "get_ir_blob_paths", "(", "hours_dict", "[", "'year'", "]", ",", "hours_dict", "[", "'day'", "]", ",", "hours_dict", "[", "'hour'", "]", ")", "if", "max_per_hour", "and", "len", "(", "blob_paths", ")", ">", "max_per_hour", ":", "blob_paths", "=", "blob_paths", "[", ":", "max_per_hour", "]", "for", "blob_path", "in", "blob_paths", ":", "yield", "blob_path" ]
[ 132, 0 ]
[ 139, 19 ]
python
en
['en', 'en', 'en']
True
run_job
(options)
Run the job.
Run the job.
def run_job(options): # pylint: disable=redefined-outer-name """Run the job.""" # for repeatability random.seed(13) # prediction box boxdef = bd.BoxDef(options['train_patch_radius'], options['label_patch_radius'], options['stride']) griddef = goesio.create_conus_griddef(options['latlonres']) # start the pipeline opts = beam.pipeline.PipelineOptions(flags=[], **options) with beam.Pipeline(options['runner'], options=opts) as p: for step in ['train', 'eval']: # create examples examples = ( p | '{}_hours'.format(step) >> beam.Create( generate_hours(options['startday'], options['endday'], options['starthour'], options['endhour'], step == 'train')) | '{}_irblobs'.format(step) >> beam.FlatMap(lambda x: get_ir_blob_paths(x, options['max_per_hour'])) | '{}_examples'.format(step) >> beam.FlatMap( lambda ir_blob_path: # pylint: disable=g-long-lambda create_record(ir_blob_path, griddef, boxdef, options['forecast_interval'], options['lightning_validity'], options['sampling_frac']) )) # shuffle the examples so that each small batch doesn't contain # highly correlated records examples = (examples | '{}_reshuffleA'.format(step) >> beam.Map( lambda t: (random.randint(1, 1000), t)) | '{}_reshuffleB'.format(step) >> beam.GroupByKey() | '{}_reshuffleC'.format(step) >> beam.FlatMap(lambda t: t[1])) # write out center pixel statistics if step == 'train': _ = (examples | 'get_values' >> beam.FlatMap( lambda x : [(f, x[f]) for f in ['ref', 'ltg']]) | 'compute_stats' >> beam.CombinePerKey(MeanStddev()) | 'write_stats' >> beam.io.Write(beam.io.WriteToText( os.path.join(options['outdir'], 'stats'), num_shards=1)) ) # write out csv files _ = ( examples | '{}_csvlines'.format(step) >> beam.Map(lambda x: x['csvline']) | '{}_writecsv'.format(step) >> beam.io.Write( beam.io.WriteToText(os.path.join(options['outdir'], 'csv', step)))) # write out tfrecords _ = ( examples | '{}_tfrecords'.format(step) >> beam.Map(lambda x: x['tfrecord']) | '{}_writetfr'.format(step) >> beam.io.tfrecordio.WriteToTFRecord( os.path.join(options['outdir'], 'tfrecord', step)))
[ "def", "run_job", "(", "options", ")", ":", "# pylint: disable=redefined-outer-name", "# for repeatability", "random", ".", "seed", "(", "13", ")", "# prediction box", "boxdef", "=", "bd", ".", "BoxDef", "(", "options", "[", "'train_patch_radius'", "]", ",", "options", "[", "'label_patch_radius'", "]", ",", "options", "[", "'stride'", "]", ")", "griddef", "=", "goesio", ".", "create_conus_griddef", "(", "options", "[", "'latlonres'", "]", ")", "# start the pipeline", "opts", "=", "beam", ".", "pipeline", ".", "PipelineOptions", "(", "flags", "=", "[", "]", ",", "*", "*", "options", ")", "with", "beam", ".", "Pipeline", "(", "options", "[", "'runner'", "]", ",", "options", "=", "opts", ")", "as", "p", ":", "for", "step", "in", "[", "'train'", ",", "'eval'", "]", ":", "# create examples", "examples", "=", "(", "p", "|", "'{}_hours'", ".", "format", "(", "step", ")", ">>", "beam", ".", "Create", "(", "generate_hours", "(", "options", "[", "'startday'", "]", ",", "options", "[", "'endday'", "]", ",", "options", "[", "'starthour'", "]", ",", "options", "[", "'endhour'", "]", ",", "step", "==", "'train'", ")", ")", "|", "'{}_irblobs'", ".", "format", "(", "step", ")", ">>", "beam", ".", "FlatMap", "(", "lambda", "x", ":", "get_ir_blob_paths", "(", "x", ",", "options", "[", "'max_per_hour'", "]", ")", ")", "|", "'{}_examples'", ".", "format", "(", "step", ")", ">>", "beam", ".", "FlatMap", "(", "lambda", "ir_blob_path", ":", "# pylint: disable=g-long-lambda", "create_record", "(", "ir_blob_path", ",", "griddef", ",", "boxdef", ",", "options", "[", "'forecast_interval'", "]", ",", "options", "[", "'lightning_validity'", "]", ",", "options", "[", "'sampling_frac'", "]", ")", ")", ")", "# shuffle the examples so that each small batch doesn't contain", "# highly correlated records", "examples", "=", "(", "examples", "|", "'{}_reshuffleA'", ".", "format", "(", "step", ")", ">>", "beam", ".", "Map", "(", "lambda", "t", ":", "(", "random", ".", "randint", "(", "1", ",", "1000", ")", ",", "t", ")", ")", "|", "'{}_reshuffleB'", ".", "format", "(", "step", ")", ">>", "beam", ".", "GroupByKey", "(", ")", "|", "'{}_reshuffleC'", ".", "format", "(", "step", ")", ">>", "beam", ".", "FlatMap", "(", "lambda", "t", ":", "t", "[", "1", "]", ")", ")", "# write out center pixel statistics", "if", "step", "==", "'train'", ":", "_", "=", "(", "examples", "|", "'get_values'", ">>", "beam", ".", "FlatMap", "(", "lambda", "x", ":", "[", "(", "f", ",", "x", "[", "f", "]", ")", "for", "f", "in", "[", "'ref'", ",", "'ltg'", "]", "]", ")", "|", "'compute_stats'", ">>", "beam", ".", "CombinePerKey", "(", "MeanStddev", "(", ")", ")", "|", "'write_stats'", ">>", "beam", ".", "io", ".", "Write", "(", "beam", ".", "io", ".", "WriteToText", "(", "os", ".", "path", ".", "join", "(", "options", "[", "'outdir'", "]", ",", "'stats'", ")", ",", "num_shards", "=", "1", ")", ")", ")", "# write out csv files", "_", "=", "(", "examples", "|", "'{}_csvlines'", ".", "format", "(", "step", ")", ">>", "beam", ".", "Map", "(", "lambda", "x", ":", "x", "[", "'csvline'", "]", ")", "|", "'{}_writecsv'", ".", "format", "(", "step", ")", ">>", "beam", ".", "io", ".", "Write", "(", "beam", ".", "io", ".", "WriteToText", "(", "os", ".", "path", ".", "join", "(", "options", "[", "'outdir'", "]", ",", "'csv'", ",", "step", ")", ")", ")", ")", "# write out tfrecords", "_", "=", "(", "examples", "|", "'{}_tfrecords'", ".", "format", "(", "step", ")", ">>", "beam", ".", "Map", "(", "lambda", "x", ":", "x", "[", "'tfrecord'", "]", ")", "|", 
"'{}_writetfr'", ".", "format", "(", "step", ")", ">>", "beam", ".", "io", ".", "tfrecordio", ".", "WriteToTFRecord", "(", "os", ".", "path", ".", "join", "(", "options", "[", "'outdir'", "]", ",", "'tfrecord'", ",", "step", ")", ")", ")" ]
[ 211, 0 ]
[ 275, 65 ]
python
en
['en', 'fi', 'en']
True
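The reshuffleA/B/C stages in run_job implement a common Beam idiom: tag each element with a random key, group by that key, then re-flatten, which breaks up runs of highly correlated records. A standalone sketch of just that pattern (assumes apache_beam is installed; step labels are illustrative):

import random
import apache_beam as beam

with beam.Pipeline() as p:
    shuffled = (
        p
        | beam.Create(range(100))
        | 'key' >> beam.Map(lambda t: (random.randint(1, 1000), t))  # random key
        | 'group' >> beam.GroupByKey()                               # shuffles by key
        | 'flatten' >> beam.FlatMap(lambda kv: kv[1]))               # drop the key

Recent Beam versions bundle this pattern as beam.Reshuffle(), which could replace the three steps.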
parse
(version)
Parse the given version string and return either a :class:`Version` object or a :class:`LegacyVersion` object depending on whether the given version is a valid PEP 440 version or a legacy version.
Parse the given version string and return either a :class:`Version` object or a :class:`LegacyVersion` object depending on whether the given version is a valid PEP 440 version or a legacy version.
def parse(version): """ Parse the given version string and return either a :class:`Version` object or a :class:`LegacyVersion` object depending on whether the given version is a valid PEP 440 version or a legacy version. """ try: return Version(version) except InvalidVersion: return LegacyVersion(version)
[ "def", "parse", "(", "version", ")", ":", "try", ":", "return", "Version", "(", "version", ")", "except", "InvalidVersion", ":", "return", "LegacyVersion", "(", "version", ")" ]
[ 23, 0 ]
[ 32, 37 ]
python
en
['en', 'error', 'th']
False
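A doctest-style illustration of the fallback (the reprs assume this record comes from the packaging library, as its classes suggest):

>>> parse('1.0.4')          # valid PEP 440
<Version('1.0.4')>
>>> parse('french toast')   # not PEP 440, falls back
<LegacyVersion('french toast')>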
_parse_local_version
(local)
Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
def _parse_local_version(local): """ Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve"). """ if local is not None: return tuple( part.lower() if not part.isdigit() else int(part) for part in _local_version_seperators.split(local) )
[ "def", "_parse_local_version", "(", "local", ")", ":", "if", "local", "is", "not", "None", ":", "return", "tuple", "(", "part", ".", "lower", "(", ")", "if", "not", "part", ".", "isdigit", "(", ")", "else", "int", "(", "part", ")", "for", "part", "in", "_local_version_seperators", ".", "split", "(", "local", ")", ")" ]
[ 331, 0 ]
[ 339, 9 ]
python
en
['en', 'error', 'th']
False
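The docstring's own example written as a doctest (assumes _local_version_seperators splits on '.', '-' and '_', as in packaging):

>>> _parse_local_version('abc.1.twelve')
('abc', 1, 'twelve')
>>> _parse_local_version(None) is None
True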
get_current_context
(silent=False)
Returns the current click context. This can be used as a way to access the current context object from anywhere. This is a more implicit alternative to the :func:`pass_context` decorator. This function is primarily useful for helpers such as :func:`echo` which might be interested in changing its behavior based on the current context. To push the current context, :meth:`Context.scope` can be used. .. versionadded:: 5.0 :param silent: if set to `True` the return value is `None` if no context is available. The default behavior is to raise a :exc:`RuntimeError`.
Returns the current click context. This can be used as a way to access the current context object from anywhere. This is a more implicit alternative to the :func:`pass_context` decorator. This function is primarily useful for helpers such as :func:`echo` which might be interested in changing its behavior based on the current context.
def get_current_context(silent=False): """Returns the current click context. This can be used as a way to access the current context object from anywhere. This is a more implicit alternative to the :func:`pass_context` decorator. This function is primarily useful for helpers such as :func:`echo` which might be interested in changing its behavior based on the current context. To push the current context, :meth:`Context.scope` can be used. .. versionadded:: 5.0 :param silent: if set to `True` the return value is `None` if no context is available. The default behavior is to raise a :exc:`RuntimeError`. """ try: return getattr(_local, 'stack')[-1] except (AttributeError, IndexError): if not silent: raise RuntimeError('There is no active click context.')
[ "def", "get_current_context", "(", "silent", "=", "False", ")", ":", "try", ":", "return", "getattr", "(", "_local", ",", "'stack'", ")", "[", "-", "1", "]", "except", "(", "AttributeError", ",", "IndexError", ")", ":", "if", "not", "silent", ":", "raise", "RuntimeError", "(", "'There is no active click context.'", ")" ]
[ 6, 0 ]
[ 25, 67 ]
python
en
['en', 'en', 'en']
True
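A small sketch of the helper from inside a click command (assumes click is installed; the command itself is illustrative):

import click

@click.command()
def hello():
    ctx = click.get_current_context()  # same object pass_context would inject
    click.echo('invoked as %s' % ctx.info_name)

Outside any invocation there is no active context, so get_current_context(silent=True) returns None instead of raising RuntimeError.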
push_context
(ctx)
Pushes a new context to the current stack.
Pushes a new context to the current stack.
def push_context(ctx): """Pushes a new context to the current stack.""" _local.__dict__.setdefault('stack', []).append(ctx)
[ "def", "push_context", "(", "ctx", ")", ":", "_local", ".", "__dict__", ".", "setdefault", "(", "'stack'", ",", "[", "]", ")", ".", "append", "(", "ctx", ")" ]
[ 28, 0 ]
[ 30, 55 ]
python
en
['en', 'en', 'en']
True
pop_context
()
Removes the top level from the stack.
Removes the top level from the stack.
def pop_context(): """Removes the top level from the stack.""" _local.stack.pop()
[ "def", "pop_context", "(", ")", ":", "_local", ".", "stack", ".", "pop", "(", ")" ]
[ 33, 0 ]
[ 35, 22 ]
python
en
['en', 'en', 'en']
True
resolve_color_default
(color=None)
Internal helper to get the default value of the color flag. If a value is passed it's returned unchanged, otherwise it's looked up from the current context.
Internal helper to get the default value of the color flag. If a value is passed it's returned unchanged, otherwise it's looked up from the current context.
def resolve_color_default(color=None): """Internal helper to get the default value of the color flag. If a value is passed it's returned unchanged, otherwise it's looked up from the current context. """ if color is not None: return color ctx = get_current_context(silent=True) if ctx is not None: return ctx.color
[ "def", "resolve_color_default", "(", "color", "=", "None", ")", ":", "if", "color", "is", "not", "None", ":", "return", "color", "ctx", "=", "get_current_context", "(", "silent", "=", "True", ")", "if", "ctx", "is", "not", "None", ":", "return", "ctx", ".", "color" ]
[ 38, 0 ]
[ 47, 24 ]
python
en
['en', 'en', 'en']
True
check_setting_language_code
(app_configs, **kwargs)
Error if LANGUAGE_CODE setting is invalid.
Error if LANGUAGE_CODE setting is invalid.
def check_setting_language_code(app_configs, **kwargs): """Error if LANGUAGE_CODE setting is invalid.""" tag = settings.LANGUAGE_CODE if not isinstance(tag, str) or not language_code_re.match(tag): return [Error(E001.msg.format(tag), id=E001.id)] return []
[ "def", "check_setting_language_code", "(", "app_configs", ",", "*", "*", "kwargs", ")", ":", "tag", "=", "settings", ".", "LANGUAGE_CODE", "if", "not", "isinstance", "(", "tag", ",", "str", ")", "or", "not", "language_code_re", ".", "match", "(", "tag", ")", ":", "return", "[", "Error", "(", "E001", ".", "msg", ".", "format", "(", "tag", ")", ",", "id", "=", "E001", ".", "id", ")", "]", "return", "[", "]" ]
[ 29, 0 ]
[ 34, 13 ]
python
en
['en', 'ja', 'en']
True
check_setting_languages
(app_configs, **kwargs)
Error if LANGUAGES setting is invalid.
Error if LANGUAGES setting is invalid.
def check_setting_languages(app_configs, **kwargs): """Error if LANGUAGES setting is invalid.""" return [ Error(E002.msg.format(tag), id=E002.id) for tag, _ in settings.LANGUAGES if not isinstance(tag, str) or not language_code_re.match(tag) ]
[ "def", "check_setting_languages", "(", "app_configs", ",", "*", "*", "kwargs", ")", ":", "return", "[", "Error", "(", "E002", ".", "msg", ".", "format", "(", "tag", ")", ",", "id", "=", "E002", ".", "id", ")", "for", "tag", ",", "_", "in", "settings", ".", "LANGUAGES", "if", "not", "isinstance", "(", "tag", ",", "str", ")", "or", "not", "language_code_re", ".", "match", "(", "tag", ")", "]" ]
[ 38, 0 ]
[ 43, 5 ]
python
en
['en', 'en', 'en']
True
check_setting_languages_bidi
(app_configs, **kwargs)
Error if LANGUAGES_BIDI setting is invalid.
Error if LANGUAGES_BIDI setting is invalid.
def check_setting_languages_bidi(app_configs, **kwargs): """Error if LANGUAGES_BIDI setting is invalid.""" return [ Error(E003.msg.format(tag), id=E003.id) for tag in settings.LANGUAGES_BIDI if not isinstance(tag, str) or not language_code_re.match(tag) ]
[ "def", "check_setting_languages_bidi", "(", "app_configs", ",", "*", "*", "kwargs", ")", ":", "return", "[", "Error", "(", "E003", ".", "msg", ".", "format", "(", "tag", ")", ",", "id", "=", "E003", ".", "id", ")", "for", "tag", "in", "settings", ".", "LANGUAGES_BIDI", "if", "not", "isinstance", "(", "tag", ",", "str", ")", "or", "not", "language_code_re", ".", "match", "(", "tag", ")", "]" ]
[ 47, 0 ]
[ 52, 5 ]
python
en
['en', 'et', 'en']
True
check_language_settings_consistent
(app_configs, **kwargs)
Error if language settings are not consistent with each other.
Error if language settings are not consistent with each other.
def check_language_settings_consistent(app_configs, **kwargs): """Error if language settings are not consistent with each other.""" try: get_supported_language_variant(settings.LANGUAGE_CODE) except LookupError: return [E004] else: return []
[ "def", "check_language_settings_consistent", "(", "app_configs", ",", "*", "*", "kwargs", ")", ":", "try", ":", "get_supported_language_variant", "(", "settings", ".", "LANGUAGE_CODE", ")", "except", "LookupError", ":", "return", "[", "E004", "]", "else", ":", "return", "[", "]" ]
[ 56, 0 ]
[ 63, 17 ]
python
en
['en', 'en', 'en']
True
validate_password
(password, user=None, password_validators=None)
Validate whether the password meets all validator requirements. If the password is valid, return ``None``. If the password is invalid, raise ValidationError with all error messages.
Validate whether the password meets all validator requirements.
def validate_password(password, user=None, password_validators=None): """ Validate whether the password meets all validator requirements. If the password is valid, return ``None``. If the password is invalid, raise ValidationError with all error messages. """ errors = [] if password_validators is None: password_validators = get_default_password_validators() for validator in password_validators: try: validator.validate(password, user) except ValidationError as error: errors.append(error) if errors: raise ValidationError(errors)
[ "def", "validate_password", "(", "password", ",", "user", "=", "None", ",", "password_validators", "=", "None", ")", ":", "errors", "=", "[", "]", "if", "password_validators", "is", "None", ":", "password_validators", "=", "get_default_password_validators", "(", ")", "for", "validator", "in", "password_validators", ":", "try", ":", "validator", ".", "validate", "(", "password", ",", "user", ")", "except", "ValidationError", "as", "error", ":", "errors", ".", "append", "(", "error", ")", "if", "errors", ":", "raise", "ValidationError", "(", "errors", ")" ]
[ 34, 0 ]
[ 50, 37 ]
python
en
['en', 'error', 'th']
False
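Typical call pattern for validate_password (assumes a configured Django settings module; the password value is illustrative):

from django.contrib.auth.password_validation import validate_password
from django.core.exceptions import ValidationError

try:
    validate_password('hunter2')  # returns None when the password passes
except ValidationError as exc:
    print(exc.messages)           # all validator failures, not just the first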
password_changed
(password, user=None, password_validators=None)
Inform all validators that have implemented a password_changed() method that the password has been changed.
Inform all validators that have implemented a password_changed() method that the password has been changed.
def password_changed(password, user=None, password_validators=None): """ Inform all validators that have implemented a password_changed() method that the password has been changed. """ if password_validators is None: password_validators = get_default_password_validators() for validator in password_validators: password_changed = getattr(validator, 'password_changed', lambda *a: None) password_changed(password, user)
[ "def", "password_changed", "(", "password", ",", "user", "=", "None", ",", "password_validators", "=", "None", ")", ":", "if", "password_validators", "is", "None", ":", "password_validators", "=", "get_default_password_validators", "(", ")", "for", "validator", "in", "password_validators", ":", "password_changed", "=", "getattr", "(", "validator", ",", "'password_changed'", ",", "lambda", "*", "a", ":", "None", ")", "password_changed", "(", "password", ",", "user", ")" ]
[ 53, 0 ]
[ 62, 40 ]
python
en
['en', 'error', 'th']
False
password_validators_help_texts
(password_validators=None)
Return a list of all help texts of all configured validators.
Return a list of all help texts of all configured validators.
def password_validators_help_texts(password_validators=None): """ Return a list of all help texts of all configured validators. """ help_texts = [] if password_validators is None: password_validators = get_default_password_validators() for validator in password_validators: help_texts.append(validator.get_help_text()) return help_texts
[ "def", "password_validators_help_texts", "(", "password_validators", "=", "None", ")", ":", "help_texts", "=", "[", "]", "if", "password_validators", "is", "None", ":", "password_validators", "=", "get_default_password_validators", "(", ")", "for", "validator", "in", "password_validators", ":", "help_texts", ".", "append", "(", "validator", ".", "get_help_text", "(", ")", ")", "return", "help_texts" ]
[ 65, 0 ]
[ 74, 21 ]
python
en
['en', 'error', 'th']
False
_password_validators_help_text_html
(password_validators=None)
Return an HTML string with all help texts of all configured validators in an <ul>.
Return an HTML string with all help texts of all configured validators in an <ul>.
def _password_validators_help_text_html(password_validators=None): """ Return an HTML string with all help texts of all configured validators in an <ul>. """ help_texts = password_validators_help_texts(password_validators) help_items = format_html_join('', '<li>{}</li>', ((help_text,) for help_text in help_texts)) return format_html('<ul>{}</ul>', help_items) if help_items else ''
[ "def", "_password_validators_help_text_html", "(", "password_validators", "=", "None", ")", ":", "help_texts", "=", "password_validators_help_texts", "(", "password_validators", ")", "help_items", "=", "format_html_join", "(", "''", ",", "'<li>{}</li>'", ",", "(", "(", "help_text", ",", ")", "for", "help_text", "in", "help_texts", ")", ")", "return", "format_html", "(", "'<ul>{}</ul>'", ",", "help_items", ")", "if", "help_items", "else", "''" ]
[ 77, 0 ]
[ 84, 71 ]
python
en
['en', 'error', 'th']
False
EmailBackend.send_messages
(self, messages)
Redirect messages to the dummy outbox
Redirect messages to the dummy outbox
def send_messages(self, messages): """Redirect messages to the dummy outbox""" msg_count = 0 for message in messages: # .message() triggers header validation message.message() mail.outbox.append(message) msg_count += 1 return msg_count
[ "def", "send_messages", "(", "self", ",", "messages", ")", ":", "msg_count", "=", "0", "for", "message", "in", "messages", ":", "# .message() triggers header validation", "message", ".", "message", "(", ")", "mail", ".", "outbox", ".", "append", "(", "message", ")", "msg_count", "+=", "1", "return", "msg_count" ]
[ 22, 4 ]
[ 29, 24 ]
python
en
['en', 'en', 'en']
True
BaseSpatialOperations.geo_db_type
(self, f)
Return the database column type for the geometry field on the spatial backend.
Return the database column type for the geometry field on the spatial backend.
def geo_db_type(self, f): """ Return the database column type for the geometry field on the spatial backend. """ raise NotImplementedError('subclasses of BaseSpatialOperations must provide a geo_db_type() method')
[ "def", "geo_db_type", "(", "self", ",", "f", ")", ":", "raise", "NotImplementedError", "(", "'subclasses of BaseSpatialOperations must provide a geo_db_type() method'", ")" ]
[ 61, 4 ]
[ 66, 108 ]
python
en
['en', 'error', 'th']
False
BaseSpatialOperations.get_distance
(self, f, value, lookup_type)
Return the distance parameters for the given geometry field, lookup value, and lookup type.
Return the distance parameters for the given geometry field, lookup value, and lookup type.
def get_distance(self, f, value, lookup_type): """ Return the distance parameters for the given geometry field, lookup value, and lookup type. """ raise NotImplementedError('Distance operations not available on this spatial backend.')
[ "def", "get_distance", "(", "self", ",", "f", ",", "value", ",", "lookup_type", ")", ":", "raise", "NotImplementedError", "(", "'Distance operations not available on this spatial backend.'", ")" ]
[ 68, 4 ]
[ 73, 95 ]
python
en
['en', 'error', 'th']
False
BaseSpatialOperations.get_geom_placeholder
(self, f, value, compiler)
Return the placeholder for the given geometry field with the given value. Depending on the spatial backend, the placeholder may contain a stored procedure call to the transformation function of the spatial backend.
Return the placeholder for the given geometry field with the given value. Depending on the spatial backend, the placeholder may contain a stored procedure call to the transformation function of the spatial backend.
def get_geom_placeholder(self, f, value, compiler): """ Return the placeholder for the given geometry field with the given value. Depending on the spatial backend, the placeholder may contain a stored procedure call to the transformation function of the spatial backend. """ def transform_value(value, field): return value is not None and value.srid != field.srid if hasattr(value, 'as_sql'): return ( '%s(%%s, %s)' % (self.spatial_function_name('Transform'), f.srid) if transform_value(value.output_field, f) else '%s' ) if transform_value(value, f): # Add Transform() to the SQL placeholder. return '%s(%s(%%s,%s), %s)' % ( self.spatial_function_name('Transform'), self.from_text, value.srid, f.srid, ) elif self.connection.features.has_spatialrefsys_table: return '%s(%%s,%s)' % (self.from_text, f.srid) else: # For backwards compatibility on MySQL (#27464). return '%s(%%s)' % self.from_text
[ "def", "get_geom_placeholder", "(", "self", ",", "f", ",", "value", ",", "compiler", ")", ":", "def", "transform_value", "(", "value", ",", "field", ")", ":", "return", "value", "is", "not", "None", "and", "value", ".", "srid", "!=", "field", ".", "srid", "if", "hasattr", "(", "value", ",", "'as_sql'", ")", ":", "return", "(", "'%s(%%s, %s)'", "%", "(", "self", ".", "spatial_function_name", "(", "'Transform'", ")", ",", "f", ".", "srid", ")", "if", "transform_value", "(", "value", ".", "output_field", ",", "f", ")", "else", "'%s'", ")", "if", "transform_value", "(", "value", ",", "f", ")", ":", "# Add Transform() to the SQL placeholder.", "return", "'%s(%s(%%s,%s), %s)'", "%", "(", "self", ".", "spatial_function_name", "(", "'Transform'", ")", ",", "self", ".", "from_text", ",", "value", ".", "srid", ",", "f", ".", "srid", ",", ")", "elif", "self", ".", "connection", ".", "features", ".", "has_spatialrefsys_table", ":", "return", "'%s(%%s,%s)'", "%", "(", "self", ".", "from_text", ",", "f", ".", "srid", ")", "else", ":", "# For backwards compatibility on MySQL (#27464).", "return", "'%s(%%s)'", "%", "self", ".", "from_text" ]
[ 75, 4 ]
[ 101, 45 ]
python
en
['en', 'error', 'th']
False
L_star_fun
(L_star)
This calculates the residual whose zero is the optimal well spacing, L_star.
This calculates the residual whose zero is the optimal well spacing, L_star.
def L_star_fun(L_star): """ This calculates the residual whose zero is the optimal well spacing, L_star. """ import numpy as np # import pudb; pudb.set_trace() return L_star**2.0*np.log(L_star/D) - \ 2.0*np.pi*rho_w*T/viscosity * \ ((alphaII*rho_r-rho_w)*gravity*reservoir_depth) * \ Cpw*t_inj/(b_*(rho_w*Cpw*porosity+rho_r*Cpr*(1.0-porosity)))
[ "def", "L_star_fun", "(", "L_star", ")", ":", "import", "numpy", "as", "np", "# import pudb; pudb.set_trace()", "return", "L_star", "**", "2.0", "*", "np", ".", "log", "(", "L_star", "/", "D", ")", "-", "2.0", "*", "np", ".", "pi", "*", "rho_w", "*", "T", "/", "viscosity", "*", "(", "(", "alphaII", "*", "rho_r", "-", "rho_w", ")", "*", "gravity", "*", "reservoir_depth", ")", "*", "Cpw", "*", "t_inj", "/", "(", "b_", "*", "(", "rho_w", "*", "Cpw", "*", "porosity", "+", "rho_r", "*", "Cpr", "*", "(", "1.0", "-", "porosity", ")", ")", ")" ]
[ 30, 0 ]
[ 39, 71 ]
python
en
['en', 'error', 'th']
False
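L_star_fun only evaluates a residual; the spacing itself comes from a root finder. A hedged sketch (the bracket endpoints are placeholders and must straddle a sign change; D and the other reservoir constants are module-level globals that must already be defined, as in the source):

from scipy.optimize import brentq

# Illustrative bracketing: the log term requires L_star > D, so start just
# above D and pick some large upper bound in meters.
L_star_opt = brentq(L_star_fun, 1.1 * D, 1.0e4)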
L_star_fun2
(L_star)
This calculates the optimal well spacing, L_star, if the reservoir constraints imply a flow rate that is higher than the flow rate that would minimize the LCOH. Define capital_cost_internal and CRF externally.
This calculates the optimal well spacing, L_star, if the reservoir constraints imply a flow rate that is higher than the flow rate that would minimize the LCOH.
def L_star_fun2(L_star): """ This calculates the optimal well spacing, L_star, if the reservoir constraints imply a flow rate that is higher than the flow rate that would minimize the LCOH. Define capital_cost_internal and CRF externally. """ import numpy as np return (capital_cost_internal*CRF*rho_w*rho_w * np.pi * permeability_*b_ / (2.0*dollars_per_kWhth*joule_to_kWh*t_inj*viscosity * np.log(L_star/D)))**0.5 - \ (rho_w*Cpw*porosity + rho_r*Cpr*(1.0-porosity)) * \ (L_star**2 * b_)/ (Cpw*t_inj)
[ "def", "L_star_fun2", "(", "L_star", ")", ":", "import", "numpy", "as", "np", "return", "(", "capital_cost_internal", "*", "CRF", "*", "rho_w", "*", "rho_w", "*", "np", ".", "pi", "*", "permeability_", "*", "b_", "/", "(", "2.0", "*", "dollars_per_kWhth", "*", "joule_to_kWh", "*", "t_inj", "*", "viscosity", "*", "np", ".", "log", "(", "L_star", "/", "D", ")", ")", ")", "**", "0.5", "-", "(", "rho_w", "*", "Cpw", "*", "porosity", "+", "rho_r", "*", "Cpr", "*", "(", "1.0", "-", "porosity", ")", ")", "*", "(", "L_star", "**", "2", "*", "b_", ")", "/", "(", "Cpw", "*", "t_inj", ")" ]
[ 41, 0 ]
[ 55, 40 ]
python
en
['en', 'error', 'th']
False
_pad_for_encryption
(message, target_length)
r"""Pads the message for encryption, returning the padded message. :return: 00 02 RANDOM_DATA 00 MESSAGE >>> block = _pad_for_encryption(b'hello', 16) >>> len(block) 16 >>> block[0:2] b'\x00\x02' >>> block[-6:] b'\x00hello'
r"""Pads the message for encryption, returning the padded message.
def _pad_for_encryption(message, target_length): r"""Pads the message for encryption, returning the padded message. :return: 00 02 RANDOM_DATA 00 MESSAGE >>> block = _pad_for_encryption(b'hello', 16) >>> len(block) 16 >>> block[0:2] b'\x00\x02' >>> block[-6:] b'\x00hello' """ max_msglength = target_length - 11 msglength = len(message) if msglength > max_msglength: raise OverflowError('%i bytes needed for message, but there is only' ' space for %i' % (msglength, max_msglength)) # Get random padding padding = b'' padding_length = target_length - msglength - 3 # We remove 0-bytes, so we'll end up with less padding than we've asked for, # so keep adding data until we're at the correct length. while len(padding) < padding_length: needed_bytes = padding_length - len(padding) # Always read at least 5 bytes more than we need, and trim off the rest # after removing the 0-bytes. This increases the chance of getting # enough bytes, especially when needed_bytes is small new_padding = os.urandom(needed_bytes + 5) new_padding = new_padding.replace(b'\x00', b'') padding = padding + new_padding[:needed_bytes] assert len(padding) == padding_length return b''.join([b'\x00\x02', padding, b'\x00', message])
[ "def", "_pad_for_encryption", "(", "message", ",", "target_length", ")", ":", "max_msglength", "=", "target_length", "-", "11", "msglength", "=", "len", "(", "message", ")", "if", "msglength", ">", "max_msglength", ":", "raise", "OverflowError", "(", "'%i bytes needed for message, but there is only'", "' space for %i'", "%", "(", "msglength", ",", "max_msglength", ")", ")", "# Get random padding", "padding", "=", "b''", "padding_length", "=", "target_length", "-", "msglength", "-", "3", "# We remove 0-bytes, so we'll end up with less padding than we've asked for,", "# so keep adding data until we're at the correct length.", "while", "len", "(", "padding", ")", "<", "padding_length", ":", "needed_bytes", "=", "padding_length", "-", "len", "(", "padding", ")", "# Always read at least 8 bytes more than we need, and trim off the rest", "# after removing the 0-bytes. This increases the chance of getting", "# enough bytes, especially when needed_bytes is small", "new_padding", "=", "os", ".", "urandom", "(", "needed_bytes", "+", "5", ")", "new_padding", "=", "new_padding", ".", "replace", "(", "b'\\x00'", ",", "b''", ")", "padding", "=", "padding", "+", "new_padding", "[", ":", "needed_bytes", "]", "assert", "len", "(", "padding", ")", "==", "padding_length", "return", "b''", ".", "join", "(", "[", "b'\\x00\\x02'", ",", "padding", ",", "b'\\x00'", ",", "message", "]", ")" ]
[ 68, 0 ]
[ 111, 30 ]
python
en
['en', 'en', 'en']
True
_pad_for_signing
(message, target_length)
r"""Pads the message for signing, returning the padded message. The padding is always a repetition of FF bytes. :return: 00 01 PADDING 00 MESSAGE >>> block = _pad_for_signing(b'hello', 16) >>> len(block) 16 >>> block[0:2] b'\x00\x01' >>> block[-6:] b'\x00hello' >>> block[2:-6] b'\xff\xff\xff\xff\xff\xff\xff\xff'
r"""Pads the message for signing, returning the padded message.
def _pad_for_signing(message, target_length): r"""Pads the message for signing, returning the padded message. The padding is always a repetition of FF bytes. :return: 00 01 PADDING 00 MESSAGE >>> block = _pad_for_signing(b'hello', 16) >>> len(block) 16 >>> block[0:2] b'\x00\x01' >>> block[-6:] b'\x00hello' >>> block[2:-6] b'\xff\xff\xff\xff\xff\xff\xff\xff' """ max_msglength = target_length - 11 msglength = len(message) if msglength > max_msglength: raise OverflowError('%i bytes needed for message, but there is only' ' space for %i' % (msglength, max_msglength)) padding_length = target_length - msglength - 3 return b''.join([b'\x00\x01', padding_length * b'\xff', b'\x00', message])
[ "def", "_pad_for_signing", "(", "message", ",", "target_length", ")", ":", "max_msglength", "=", "target_length", "-", "11", "msglength", "=", "len", "(", "message", ")", "if", "msglength", ">", "max_msglength", ":", "raise", "OverflowError", "(", "'%i bytes needed for message, but there is only'", "' space for %i'", "%", "(", "msglength", ",", "max_msglength", ")", ")", "padding_length", "=", "target_length", "-", "msglength", "-", "3", "return", "b''", ".", "join", "(", "[", "b'\\x00\\x01'", ",", "padding_length", "*", "b'\\xff'", ",", "b'\\x00'", ",", "message", "]", ")" ]
[ 114, 0 ]
[ 145, 30 ]
python
en
['en', 'en', 'en']
True
encrypt
(message, pub_key)
Encrypts the given message using PKCS#1 v1.5 :param message: the message to encrypt. Must be a byte string no longer than ``k-11`` bytes, where ``k`` is the number of bytes needed to encode the ``n`` component of the public key. :param pub_key: the :py:class:`rsa.PublicKey` to encrypt with. :raise OverflowError: when the message is too large to fit in the padded block. >>> from rsa import key, common >>> (pub_key, priv_key) = key.newkeys(256) >>> message = b'hello' >>> crypto = encrypt(message, pub_key) The crypto text should be just as long as the public key 'n' component: >>> len(crypto) == common.byte_size(pub_key.n) True
Encrypts the given message using PKCS#1 v1.5
def encrypt(message, pub_key): """Encrypts the given message using PKCS#1 v1.5 :param message: the message to encrypt. Must be a byte string no longer than ``k-11`` bytes, where ``k`` is the number of bytes needed to encode the ``n`` component of the public key. :param pub_key: the :py:class:`rsa.PublicKey` to encrypt with. :raise OverflowError: when the message is too large to fit in the padded block. >>> from rsa import key, common >>> (pub_key, priv_key) = key.newkeys(256) >>> message = b'hello' >>> crypto = encrypt(message, pub_key) The crypto text should be just as long as the public key 'n' component: >>> len(crypto) == common.byte_size(pub_key.n) True """ keylength = common.byte_size(pub_key.n) padded = _pad_for_encryption(message, keylength) payload = transform.bytes2int(padded) encrypted = core.encrypt_int(payload, pub_key.e, pub_key.n) block = transform.int2bytes(encrypted, keylength) return block
[ "def", "encrypt", "(", "message", ",", "pub_key", ")", ":", "keylength", "=", "common", ".", "byte_size", "(", "pub_key", ".", "n", ")", "padded", "=", "_pad_for_encryption", "(", "message", ",", "keylength", ")", "payload", "=", "transform", ".", "bytes2int", "(", "padded", ")", "encrypted", "=", "core", ".", "encrypt_int", "(", "payload", ",", "pub_key", ".", "e", ",", "pub_key", ".", "n", ")", "block", "=", "transform", ".", "int2bytes", "(", "encrypted", ",", "keylength", ")", "return", "block" ]
[ 148, 0 ]
[ 177, 16 ]
python
en
['en', 'en', 'en']
True
decrypt
(crypto, priv_key)
r"""Decrypts the given message using PKCS#1 v1.5 The decryption is considered 'failed' when the resulting cleartext doesn't start with the bytes 00 02, or when the 00 byte between the padding and the message cannot be found. :param crypto: the crypto text as returned by :py:func:`rsa.encrypt` :param priv_key: the :py:class:`rsa.PrivateKey` to decrypt with. :raise DecryptionError: when the decryption fails. No details are given as to why the code thinks the decryption fails, as this would leak information about the private key. >>> import rsa >>> (pub_key, priv_key) = rsa.newkeys(256) It works with strings: >>> crypto = encrypt(b'hello', pub_key) >>> decrypt(crypto, priv_key) b'hello' And with binary data: >>> crypto = encrypt(b'\x00\x00\x00\x00\x01', pub_key) >>> decrypt(crypto, priv_key) b'\x00\x00\x00\x00\x01' Altering the encrypted information will *likely* cause a :py:class:`rsa.pkcs1.DecryptionError`. If you want to be *sure*, use :py:func:`rsa.sign`. .. warning:: Never display the stack trace of a :py:class:`rsa.pkcs1.DecryptionError` exception. It shows where in the code the exception occurred, and thus leaks information about the key. It's only a tiny bit of information, but every bit makes cracking the keys easier. >>> crypto = encrypt(b'hello', pub_key) >>> crypto = crypto[0:5] + b'X' + crypto[6:] # change a byte >>> decrypt(crypto, priv_key) Traceback (most recent call last): ... rsa.pkcs1.DecryptionError: Decryption failed
r"""Decrypts the given message using PKCS#1 v1.5
def decrypt(crypto, priv_key): r"""Decrypts the given message using PKCS#1 v1.5 The decryption is considered 'failed' when the resulting cleartext doesn't start with the bytes 00 02, or when the 00 byte between the padding and the message cannot be found. :param crypto: the crypto text as returned by :py:func:`rsa.encrypt` :param priv_key: the :py:class:`rsa.PrivateKey` to decrypt with. :raise DecryptionError: when the decryption fails. No details are given as to why the code thinks the decryption fails, as this would leak information about the private key. >>> import rsa >>> (pub_key, priv_key) = rsa.newkeys(256) It works with strings: >>> crypto = encrypt(b'hello', pub_key) >>> decrypt(crypto, priv_key) b'hello' And with binary data: >>> crypto = encrypt(b'\x00\x00\x00\x00\x01', pub_key) >>> decrypt(crypto, priv_key) b'\x00\x00\x00\x00\x01' Altering the encrypted information will *likely* cause a :py:class:`rsa.pkcs1.DecryptionError`. If you want to be *sure*, use :py:func:`rsa.sign`. .. warning:: Never display the stack trace of a :py:class:`rsa.pkcs1.DecryptionError` exception. It shows where in the code the exception occurred, and thus leaks information about the key. It's only a tiny bit of information, but every bit makes cracking the keys easier. >>> crypto = encrypt(b'hello', pub_key) >>> crypto = crypto[0:5] + b'X' + crypto[6:] # change a byte >>> decrypt(crypto, priv_key) Traceback (most recent call last): ... rsa.pkcs1.DecryptionError: Decryption failed """ blocksize = common.byte_size(priv_key.n) encrypted = transform.bytes2int(crypto) decrypted = priv_key.blinded_decrypt(encrypted) cleartext = transform.int2bytes(decrypted, blocksize) # If we can't find the cleartext marker, decryption failed. if cleartext[0:2] != b'\x00\x02': raise DecryptionError('Decryption failed') # Find the 00 separator between the padding and the message try: sep_idx = cleartext.index(b'\x00', 2) except ValueError: raise DecryptionError('Decryption failed') return cleartext[sep_idx + 1:]
[ "def", "decrypt", "(", "crypto", ",", "priv_key", ")", ":", "blocksize", "=", "common", ".", "byte_size", "(", "priv_key", ".", "n", ")", "encrypted", "=", "transform", ".", "bytes2int", "(", "crypto", ")", "decrypted", "=", "priv_key", ".", "blinded_decrypt", "(", "encrypted", ")", "cleartext", "=", "transform", ".", "int2bytes", "(", "decrypted", ",", "blocksize", ")", "# If we can't find the cleartext marker, decryption failed.", "if", "cleartext", "[", "0", ":", "2", "]", "!=", "b'\\x00\\x02'", ":", "raise", "DecryptionError", "(", "'Decryption failed'", ")", "# Find the 00 separator between the padding and the message", "try", ":", "sep_idx", "=", "cleartext", ".", "index", "(", "b'\\x00'", ",", "2", ")", "except", "ValueError", ":", "raise", "DecryptionError", "(", "'Decryption failed'", ")", "return", "cleartext", "[", "sep_idx", "+", "1", ":", "]" ]
[ 180, 0 ]
[ 246, 34 ]
python
en
['en', 'en', 'en']
True
sign_hash
(hash_value, priv_key, hash_method)
Signs a precomputed hash with the private key. The hash is signed as-is, without touching the message it came from; this is known as a "detached signature", because the message itself isn't altered. :param hash_value: the precomputed hash to sign. :param priv_key: the :py:class:`rsa.PrivateKey` to sign with :param hash_method: the hash method used on the message. Use 'MD5', 'SHA-1', 'SHA-224', 'SHA-256', 'SHA-384' or 'SHA-512'. :return: a message signature block. :raise OverflowError: if the private key is too small to contain the requested hash.
Signs a precomputed hash with the private key.
def sign_hash(hash_value, priv_key, hash_method): """Signs a precomputed hash with the private key. The hash is signed as-is, without touching the message it came from; this is known as a "detached signature", because the message itself isn't altered. :param hash_value: the precomputed hash to sign. :param priv_key: the :py:class:`rsa.PrivateKey` to sign with :param hash_method: the hash method used on the message. Use 'MD5', 'SHA-1', 'SHA-224', 'SHA-256', 'SHA-384' or 'SHA-512'. :return: a message signature block. :raise OverflowError: if the private key is too small to contain the requested hash. """ # Get the ASN1 code for this hash method if hash_method not in HASH_ASN1: raise ValueError('Invalid hash method: %s' % hash_method) asn1code = HASH_ASN1[hash_method] # Encrypt the hash with the private key cleartext = asn1code + hash_value keylength = common.byte_size(priv_key.n) padded = _pad_for_signing(cleartext, keylength) payload = transform.bytes2int(padded) encrypted = priv_key.blinded_encrypt(payload) block = transform.int2bytes(encrypted, keylength) return block
[ "def", "sign_hash", "(", "hash_value", ",", "priv_key", ",", "hash_method", ")", ":", "# Get the ASN1 code for this hash method", "if", "hash_method", "not", "in", "HASH_ASN1", ":", "raise", "ValueError", "(", "'Invalid hash method: %s'", "%", "hash_method", ")", "asn1code", "=", "HASH_ASN1", "[", "hash_method", "]", "# Encrypt the hash with the private key", "cleartext", "=", "asn1code", "+", "hash_value", "keylength", "=", "common", ".", "byte_size", "(", "priv_key", ".", "n", ")", "padded", "=", "_pad_for_signing", "(", "cleartext", ",", "keylength", ")", "payload", "=", "transform", ".", "bytes2int", "(", "padded", ")", "encrypted", "=", "priv_key", ".", "blinded_encrypt", "(", "payload", ")", "block", "=", "transform", ".", "int2bytes", "(", "encrypted", ",", "keylength", ")", "return", "block" ]
[ 249, 0 ]
[ 280, 16 ]
python
en
['en', 'en', 'en']
True
sign
(message, priv_key, hash_method)
Signs the message with the private key. Hashes the message, then signs the hash with the given key. This is known as a "detached signature", because the message itself isn't altered. :param message: the message to sign. Can be an 8-bit string or a file-like object. If ``message`` has a ``read()`` method, it is assumed to be a file-like object. :param priv_key: the :py:class:`rsa.PrivateKey` to sign with :param hash_method: the hash method used on the message. Use 'MD5', 'SHA-1', 'SHA-224', 'SHA-256', 'SHA-384' or 'SHA-512'. :return: a message signature block. :raise OverflowError: if the private key is too small to contain the requested hash.
Signs the message with the private key.
def sign(message, priv_key, hash_method): """Signs the message with the private key. Hashes the message, then signs the hash with the given key. This is known as a "detached signature", because the message itself isn't altered. :param message: the message to sign. Can be an 8-bit string or a file-like object. If ``message`` has a ``read()`` method, it is assumed to be a file-like object. :param priv_key: the :py:class:`rsa.PrivateKey` to sign with :param hash_method: the hash method used on the message. Use 'MD5', 'SHA-1', 'SHA-224', 'SHA-256', 'SHA-384' or 'SHA-512'. :return: a message signature block. :raise OverflowError: if the private key is too small to contain the requested hash. """ msg_hash = compute_hash(message, hash_method) return sign_hash(msg_hash, priv_key, hash_method)
[ "def", "sign", "(", "message", ",", "priv_key", ",", "hash_method", ")", ":", "msg_hash", "=", "compute_hash", "(", "message", ",", "hash_method", ")", "return", "sign_hash", "(", "msg_hash", ",", "priv_key", ",", "hash_method", ")" ]
[ 283, 0 ]
[ 302, 53 ]
python
en
['en', 'en', 'en']
True
verify
(message, signature, pub_key)
Verifies that the signature matches the message. The hash method is detected automatically from the signature. :param message: the signed message. Can be an 8-bit string or a file-like object. If ``message`` has a ``read()`` method, it is assumed to be a file-like object. :param signature: the signature block, as created with :py:func:`rsa.sign`. :param pub_key: the :py:class:`rsa.PublicKey` of the person signing the message. :raise VerificationError: when the signature doesn't match the message. :returns: the name of the used hash.
Verifies that the signature matches the message.
def verify(message, signature, pub_key): """Verifies that the signature matches the message. The hash method is detected automatically from the signature. :param message: the signed message. Can be an 8-bit string or a file-like object. If ``message`` has a ``read()`` method, it is assumed to be a file-like object. :param signature: the signature block, as created with :py:func:`rsa.sign`. :param pub_key: the :py:class:`rsa.PublicKey` of the person signing the message. :raise VerificationError: when the signature doesn't match the message. :returns: the name of the used hash. """ keylength = common.byte_size(pub_key.n) encrypted = transform.bytes2int(signature) decrypted = core.decrypt_int(encrypted, pub_key.e, pub_key.n) clearsig = transform.int2bytes(decrypted, keylength) # Get the hash method method_name = _find_method_hash(clearsig) message_hash = compute_hash(message, method_name) # Reconstruct the expected padded hash cleartext = HASH_ASN1[method_name] + message_hash expected = _pad_for_signing(cleartext, keylength) # Compare with the signed one if expected != clearsig: raise VerificationError('Verification failed') return method_name
[ "def", "verify", "(", "message", ",", "signature", ",", "pub_key", ")", ":", "keylength", "=", "common", ".", "byte_size", "(", "pub_key", ".", "n", ")", "encrypted", "=", "transform", ".", "bytes2int", "(", "signature", ")", "decrypted", "=", "core", ".", "decrypt_int", "(", "encrypted", ",", "pub_key", ".", "e", ",", "pub_key", ".", "n", ")", "clearsig", "=", "transform", ".", "int2bytes", "(", "decrypted", ",", "keylength", ")", "# Get the hash method", "method_name", "=", "_find_method_hash", "(", "clearsig", ")", "message_hash", "=", "compute_hash", "(", "message", ",", "method_name", ")", "# Reconstruct the expected padded hash", "cleartext", "=", "HASH_ASN1", "[", "method_name", "]", "+", "message_hash", "expected", "=", "_pad_for_signing", "(", "cleartext", ",", "keylength", ")", "# Compare with the signed one", "if", "expected", "!=", "clearsig", ":", "raise", "VerificationError", "(", "'Verification failed'", ")", "return", "method_name" ]
[ 305, 0 ]
[ 337, 22 ]
python
en
['en', 'en', 'en']
True
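The sign/verify pair round-trips; a doctest-style sketch in the same style as this file's other docstrings (a 512-bit key is just large enough for a padded SHA-256 digest):

>>> import rsa
>>> (pub_key, priv_key) = rsa.newkeys(512)
>>> signature = rsa.sign(b'hello', priv_key, 'SHA-256')
>>> rsa.verify(b'hello', signature, pub_key)
'SHA-256'
>>> rsa.verify(b'tampered', signature, pub_key)
Traceback (most recent call last):
    ...
rsa.pkcs1.VerificationError: Verification failed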
find_signature_hash
(signature, pub_key)
Returns the hash name detected from the signature. If you also want to verify the message, use :py:func:`rsa.verify()` instead. It also returns the name of the used hash. :param signature: the signature block, as created with :py:func:`rsa.sign`. :param pub_key: the :py:class:`rsa.PublicKey` of the person signing the message. :returns: the name of the used hash.
Returns the hash name detected from the signature.
def find_signature_hash(signature, pub_key): """Returns the hash name detected from the signature. If you also want to verify the message, use :py:func:`rsa.verify()` instead. It also returns the name of the used hash. :param signature: the signature block, as created with :py:func:`rsa.sign`. :param pub_key: the :py:class:`rsa.PublicKey` of the person signing the message. :returns: the name of the used hash. """ keylength = common.byte_size(pub_key.n) encrypted = transform.bytes2int(signature) decrypted = core.decrypt_int(encrypted, pub_key.e, pub_key.n) clearsig = transform.int2bytes(decrypted, keylength) return _find_method_hash(clearsig)
[ "def", "find_signature_hash", "(", "signature", ",", "pub_key", ")", ":", "keylength", "=", "common", ".", "byte_size", "(", "pub_key", ".", "n", ")", "encrypted", "=", "transform", ".", "bytes2int", "(", "signature", ")", "decrypted", "=", "core", ".", "decrypt_int", "(", "encrypted", ",", "pub_key", ".", "e", ",", "pub_key", ".", "n", ")", "clearsig", "=", "transform", ".", "int2bytes", "(", "decrypted", ",", "keylength", ")", "return", "_find_method_hash", "(", "clearsig", ")" ]
[ 340, 0 ]
[ 356, 38 ]
python
en
['en', 'en', 'en']
True
yield_fixedblocks
(infile, blocksize)
Generator, yields each block of ``blocksize`` bytes in the input file. :param infile: file to read and separate in blocks. :param blocksize: block size in bytes. :returns: a generator that yields the contents of each block
Generator, yields each block of ``blocksize`` bytes in the input file.
def yield_fixedblocks(infile, blocksize): """Generator, yields each block of ``blocksize`` bytes in the input file. :param infile: file to read and separate in blocks. :param blocksize: block size in bytes. :returns: a generator that yields the contents of each block """ while True: block = infile.read(blocksize) read_bytes = len(block) if read_bytes == 0: break yield block if read_bytes < blocksize: break
[ "def", "yield_fixedblocks", "(", "infile", ",", "blocksize", ")", ":", "while", "True", ":", "block", "=", "infile", ".", "read", "(", "blocksize", ")", "read_bytes", "=", "len", "(", "block", ")", "if", "read_bytes", "==", "0", ":", "break", "yield", "block", "if", "read_bytes", "<", "blocksize", ":", "break" ]
[ 359, 0 ]
[ 377, 17 ]
python
en
['en', 'en', 'en']
True
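Behavior sketch with an in-memory file (doctest style; note the short final block is still yielded before the generator stops):

>>> from io import BytesIO
>>> list(yield_fixedblocks(BytesIO(b'abcdefgh'), 3))
[b'abc', b'def', b'gh']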
compute_hash
(message, method_name)
Returns the message digest. :param message: the signed message. Can be an 8-bit string or a file-like object. If ``message`` has a ``read()`` method, it is assumed to be a file-like object. :param method_name: the hash method, must be a key of :py:const:`HASH_METHODS`.
Returns the message digest.
def compute_hash(message, method_name): """Returns the message digest. :param message: the signed message. Can be an 8-bit string or a file-like object. If ``message`` has a ``read()`` method, it is assumed to be a file-like object. :param method_name: the hash method, must be a key of :py:const:`HASH_METHODS`. """ if method_name not in HASH_METHODS: raise ValueError('Invalid hash method: %s' % method_name) method = HASH_METHODS[method_name] hasher = method() if hasattr(message, 'read') and hasattr(message.read, '__call__'): # read as 1K blocks for block in yield_fixedblocks(message, 1024): hasher.update(block) else: # hash the message object itself. hasher.update(message) return hasher.digest()
[ "def", "compute_hash", "(", "message", ",", "method_name", ")", ":", "if", "method_name", "not", "in", "HASH_METHODS", ":", "raise", "ValueError", "(", "'Invalid hash method: %s'", "%", "method_name", ")", "method", "=", "HASH_METHODS", "[", "method_name", "]", "hasher", "=", "method", "(", ")", "if", "hasattr", "(", "message", ",", "'read'", ")", "and", "hasattr", "(", "message", ".", "read", ",", "'__call__'", ")", ":", "# read as 1K blocks", "for", "block", "in", "yield_fixedblocks", "(", "message", ",", "1024", ")", ":", "hasher", ".", "update", "(", "block", ")", "else", ":", "# hash the message object itself.", "hasher", ".", "update", "(", "message", ")", "return", "hasher", ".", "digest", "(", ")" ]
[ 380, 0 ]
[ 405, 26 ]
python
en
['en', 'en', 'en']
True
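Assuming HASH_METHODS maps names such as 'SHA-256' to the matching hashlib constructors (as it does in python-rsa), the digest should agree with hashlib directly, for bytes and file-like inputs alike:

import hashlib
import io

digest = compute_hash(b'message', 'SHA-256')
assert digest == hashlib.sha256(b'message').digest()

# File-like objects are consumed in 1 KiB blocks with the same result.
assert compute_hash(io.BytesIO(b'message'), 'SHA-256') == digest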
_find_method_hash
(clearsig)
Finds the hash method. :param clearsig: full padded ASN1 and hash. :return: the used hash method. :raise VerificationError: when the hash method cannot be found
Finds the hash method.
def _find_method_hash(clearsig): """Finds the hash method. :param clearsig: full padded ASN1 and hash. :return: the used hash method. :raise VerificationError: when the hash method cannot be found """ for (hashname, asn1code) in HASH_ASN1.items(): if asn1code in clearsig: return hashname raise VerificationError('Verification failed')
[ "def", "_find_method_hash", "(", "clearsig", ")", ":", "for", "(", "hashname", ",", "asn1code", ")", "in", "HASH_ASN1", ".", "items", "(", ")", ":", "if", "asn1code", "in", "clearsig", ":", "return", "hashname", "raise", "VerificationError", "(", "'Verification failed'", ")" ]
[ 408, 0 ]
[ 420, 50 ]
python
en
['en', 'en', 'en']
True
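A probe sketch built on the EMSA-PKCS1-v1_5 layout (00 01 FF..FF 00, ASN.1 prefix, digest); the HASH_ASN1 table and the 20-byte SHA-1 digest length are assumptions taken from python-rsa's conventions:

# The SHA-1 ASN.1 prefix embedded anywhere in the block is enough,
# since the helper uses a substring test.
fake_clearsig = (b'\x00\x01' + b'\xff' * 8 + b'\x00'
                 + HASH_ASN1['SHA-1'] + b'\x00' * 20)
assert _find_method_hash(fake_clearsig) == 'SHA-1'
# A block containing no known prefix raises VerificationError instead.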
ContactList.search
(self, name)
Return all contacts whose name contains the search value
Return all contacts whose name contains the search value
def search(self, name): "Return all contacts whose name contains the search value" matching_contacts = [] for contact in self: if name in contact.name: matching_contacts.append(contact) return matching_contacts
[ "def", "search", "(", "self", ",", "name", ")", ":", "matching_contacts", "=", "[", "]", "for", "contact", "in", "self", ":", "if", "name", "in", "contact", ".", "name", ":", "matching_contacts", ".", "append", "(", "contact", ")", "return", "matching_contacts" ]
[ 19, 4 ]
[ 25, 32 ]
python
en
['en', 'en', 'en']
True
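A runnable sketch; the Contact class and the list-subclass base are hypothetical stand-ins for what the method assumes (an iterable of objects exposing a .name attribute):

class Contact:
    def __init__(self, name):
        self.name = name

class ContactList(list):
    # Condensed copy of the search method shown in this record.
    def search(self, name):
        return [contact for contact in self if name in contact.name]

contacts = ContactList([Contact('John A'), Contact('John B'), Contact('Jenna')])
print([c.name for c in contacts.search('John')])  # ['John A', 'John B']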
_parse_ld_musl_from_elf
(f: IO[bytes])
Detect musl libc location by parsing the Python executable. Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
Detect musl libc location by parsing the Python executable.
def _parse_ld_musl_from_elf(f: IO[bytes]) -> Optional[str]: """Detect musl libc location by parsing the Python executable. Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html """ f.seek(0) try: ident = _read_unpacked(f, "16B") except struct.error: return None if ident[:4] != tuple(b"\x7fELF"): # Invalid magic, not ELF. return None f.seek(struct.calcsize("HHI"), 1) # Skip file type, machine, and version. try: # e_fmt: Format for program header. # p_fmt: Format for section header. # p_idx: Indexes to find p_type, p_offset, and p_filesz. e_fmt, p_fmt, p_idx = { 1: ("IIIIHHH", "IIIIIIII", (0, 1, 4)), # 32-bit. 2: ("QQQIHHH", "IIQQQQQQ", (0, 2, 5)), # 64-bit. }[ident[4]] except KeyError: return None else: p_get = operator.itemgetter(*p_idx) # Find the interpreter section and return its content. try: _, e_phoff, _, _, _, e_phentsize, e_phnum = _read_unpacked(f, e_fmt) except struct.error: return None for i in range(e_phnum + 1): f.seek(e_phoff + e_phentsize * i) try: p_type, p_offset, p_filesz = p_get(_read_unpacked(f, p_fmt)) except struct.error: return None if p_type != 3: # Not PT_INTERP. continue f.seek(p_offset) interpreter = os.fsdecode(f.read(p_filesz)).strip("\0") if "musl" not in interpreter: return None return interpreter return None
[ "def", "_parse_ld_musl_from_elf", "(", "f", ":", "IO", "[", "bytes", "]", ")", "->", "Optional", "[", "str", "]", ":", "f", ".", "seek", "(", "0", ")", "try", ":", "ident", "=", "_read_unpacked", "(", "f", ",", "\"16B\"", ")", "except", "struct", ".", "error", ":", "return", "None", "if", "ident", "[", ":", "4", "]", "!=", "tuple", "(", "b\"\\x7fELF\"", ")", ":", "# Invalid magic, not ELF.", "return", "None", "f", ".", "seek", "(", "struct", ".", "calcsize", "(", "\"HHI\"", ")", ",", "1", ")", "# Skip file type, machine, and version.", "try", ":", "# e_fmt: Format for program header.", "# p_fmt: Format for section header.", "# p_idx: Indexes to find p_type, p_offset, and p_filesz.", "e_fmt", ",", "p_fmt", ",", "p_idx", "=", "{", "1", ":", "(", "\"IIIIHHH\"", ",", "\"IIIIIIII\"", ",", "(", "0", ",", "1", ",", "4", ")", ")", ",", "# 32-bit.", "2", ":", "(", "\"QQQIHHH\"", ",", "\"IIQQQQQQ\"", ",", "(", "0", ",", "2", ",", "5", ")", ")", ",", "# 64-bit.", "}", "[", "ident", "[", "4", "]", "]", "except", "KeyError", ":", "return", "None", "else", ":", "p_get", "=", "operator", ".", "itemgetter", "(", "*", "p_idx", ")", "# Find the interpreter section and return its content.", "try", ":", "_", ",", "e_phoff", ",", "_", ",", "_", ",", "_", ",", "e_phentsize", ",", "e_phnum", "=", "_read_unpacked", "(", "f", ",", "e_fmt", ")", "except", "struct", ".", "error", ":", "return", "None", "for", "i", "in", "range", "(", "e_phnum", "+", "1", ")", ":", "f", ".", "seek", "(", "e_phoff", "+", "e_phentsize", "*", "i", ")", "try", ":", "p_type", ",", "p_offset", ",", "p_filesz", "=", "p_get", "(", "_read_unpacked", "(", "f", ",", "p_fmt", ")", ")", "except", "struct", ".", "error", ":", "return", "None", "if", "p_type", "!=", "3", ":", "# Not PT_INTERP.", "continue", "f", ".", "seek", "(", "p_offset", ")", "interpreter", "=", "os", ".", "fsdecode", "(", "f", ".", "read", "(", "p_filesz", ")", ")", ".", "strip", "(", "\"\\0\"", ")", "if", "\"musl\"", "not", "in", "interpreter", ":", "return", "None", "return", "interpreter", "return", "None" ]
[ 21, 0 ]
[ 67, 15 ]
python
en
['en', 'en', 'en']
True
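A direct probe of the parser; the result depends on the host libc (a loader path such as /lib/ld-musl-x86_64.so.1 on musl, None on glibc):

import sys

with open(sys.executable, 'rb') as f:
    print(_parse_ld_musl_from_elf(f))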
_get_musl_version
(executable: str)
Detect currently-running musl runtime version. This is done by checking the specified executable's dynamic linking information, and invoking the loader to parse its output for a version string. If the loader is musl, the output would be something like:: musl libc (x86_64) Version 1.2.2 Dynamic Program Loader
Detect currently-running musl runtime version.
def _get_musl_version(executable: str) -> Optional[_MuslVersion]: """Detect currently-running musl runtime version. This is done by checking the specified executable's dynamic linking information, and invoking the loader to parse its output for a version string. If the loader is musl, the output would be something like:: musl libc (x86_64) Version 1.2.2 Dynamic Program Loader """ with contextlib.ExitStack() as stack: try: f = stack.enter_context(open(executable, "rb")) except IOError: return None ld = _parse_ld_musl_from_elf(f) if not ld: return None proc = subprocess.run([ld], stderr=subprocess.PIPE, universal_newlines=True) return _parse_musl_version(proc.stderr)
[ "def", "_get_musl_version", "(", "executable", ":", "str", ")", "->", "Optional", "[", "_MuslVersion", "]", ":", "with", "contextlib", ".", "ExitStack", "(", ")", "as", "stack", ":", "try", ":", "f", "=", "stack", ".", "enter_context", "(", "open", "(", "executable", ",", "\"rb\"", ")", ")", "except", "IOError", ":", "return", "None", "ld", "=", "_parse_ld_musl_from_elf", "(", "f", ")", "if", "not", "ld", ":", "return", "None", "proc", "=", "subprocess", ".", "run", "(", "[", "ld", "]", ",", "stderr", "=", "subprocess", ".", "PIPE", ",", "universal_newlines", "=", "True", ")", "return", "_parse_musl_version", "(", "proc", ".", "stderr", ")" ]
[ 86, 0 ]
[ 106, 43 ]
python
en
['en', 'en', 'en']
True
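Probing the running interpreter with the helper above; on an Alpine-style musl system this prints something like _MuslVersion(major=1, minor=2), and None elsewhere:

import sys

print(_get_musl_version(sys.executable))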
platform_tags
(arch: str)
Generate musllinux tags compatible with the current platform. :param arch: Should be the part of platform tag after the ``linux_`` prefix, e.g. ``x86_64``. The ``linux_`` prefix is assumed as a prerequisite for the current platform to be musllinux-compatible. :returns: An iterator of compatible musllinux tags.
Generate musllinux tags compatible with the current platform.
def platform_tags(arch: str) -> Iterator[str]: """Generate musllinux tags compatible with the current platform. :param arch: Should be the part of platform tag after the ``linux_`` prefix, e.g. ``x86_64``. The ``linux_`` prefix is assumed as a prerequisite for the current platform to be musllinux-compatible. :returns: An iterator of compatible musllinux tags. """ sys_musl = _get_musl_version(sys.executable) if sys_musl is None: # Python not dynamically linked against musl. return for minor in range(sys_musl.minor, -1, -1): yield f"musllinux_{sys_musl.major}_{minor}_{arch}"
[ "def", "platform_tags", "(", "arch", ":", "str", ")", "->", "Iterator", "[", "str", "]", ":", "sys_musl", "=", "_get_musl_version", "(", "sys", ".", "executable", ")", "if", "sys_musl", "is", "None", ":", "# Python not dynamically linked against musl.", "return", "for", "minor", "in", "range", "(", "sys_musl", ".", "minor", ",", "-", "1", ",", "-", "1", ")", ":", "yield", "f\"musllinux_{sys_musl.major}_{minor}_{arch}\"" ]
[ 109, 0 ]
[ 122, 58 ]
python
en
['en', 'en', 'en']
True
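Because the iterator counts the minor version down, a musl 1.2 / x86_64 host would yield musllinux_1_2_x86_64, musllinux_1_1_x86_64, then musllinux_1_0_x86_64; on glibc nothing is yielded:

for tag in platform_tags('x86_64'):
    print(tag)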
format_num
(num, decplaces=10)
Converts a number into a more readable string version.
Converts a number into a more readable string version.
def format_num(num, decplaces=10): "Converts a number into a more readable string version." try: dec = Decimal(num) # Cut the decimal off at "precision" decimal places. if decplaces < 1: dec = dec.quantize(Decimal("0")) else: # Set our precision to at least 28 or twice our precision, lest # Decimal.quantize complains about "result has too many digits". getcontext().prec = max(28, int(decplaces * 2)) dec = dec.quantize(Decimal(".{}".format("0" * decplaces))) except: return "bad" # Split the decimal into sign, digits and exponent. tup = dec.as_tuple() delta = len(tup.digits) + tup.exponent digits = "".join(str(d) for d in tup.digits) # Put the number back together considering the delta. if delta <= 0: zeros = abs(tup.exponent) - len(tup.digits) val = "0." + ("0" * zeros) + digits else: val = digits[:delta] + ("0" * tup.exponent) + '.' + digits[delta:] # Strip trailing 0s and/or trailing dot: val = val.rstrip("0") if val[-1] == ".": val = val[:-1] if tup.sign: return "-" + val return val
[ "def", "format_num", "(", "num", ",", "decplaces", "=", "10", ")", ":", "try", ":", "dec", "=", "Decimal", "(", "num", ")", "# Cut the decimal off at \"precision\" decimal places.", "if", "decplaces", "<", "1", ":", "dec", "=", "dec", ".", "quantize", "(", "Decimal", "(", "\"0\"", ")", ")", "else", ":", "# Set our precision to at least 28 or twice our precision, lest", "# Decimal.quantize complains about \"result has too many digits\".", "getcontext", "(", ")", ".", "prec", "=", "max", "(", "28", ",", "int", "(", "decplaces", "*", "2", ")", ")", "dec", "=", "dec", ".", "quantize", "(", "Decimal", "(", "\".{}\"", ".", "format", "(", "\"0\"", "*", "decplaces", ")", ")", ")", "except", ":", "return", "\"bad\"", "# Split the decimal into sign, digits and exponent.", "tup", "=", "dec", ".", "as_tuple", "(", ")", "delta", "=", "len", "(", "tup", ".", "digits", ")", "+", "tup", ".", "exponent", "digits", "=", "\"\"", ".", "join", "(", "str", "(", "d", ")", "for", "d", "in", "tup", ".", "digits", ")", "# Put the number back together considering the delta.", "if", "delta", "<=", "0", ":", "zeros", "=", "abs", "(", "tup", ".", "exponent", ")", "-", "len", "(", "tup", ".", "digits", ")", "val", "=", "\"0.\"", "+", "(", "\"0\"", "*", "zeros", ")", "+", "digits", "else", ":", "val", "=", "digits", "[", ":", "delta", "]", "+", "(", "\"0\"", "*", "tup", ".", "exponent", ")", "+", "'.'", "+", "digits", "[", "delta", ":", "]", "# Strip trailing 0s and/or trailing dot:", "val", "=", "val", ".", "rstrip", "(", "\"0\"", ")", "if", "val", "[", "-", "1", "]", "==", "\".\"", ":", "val", "=", "val", "[", ":", "-", "1", "]", "if", "tup", ".", "sign", ":", "return", "\"-\"", "+", "val", "return", "val" ]
[ 6, 0 ]
[ 37, 14 ]
python
en
['en', 'en', 'en']
True
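Illustrative calls (the module-level from decimal import Decimal, getcontext that the function relies on is assumed); rounding follows Decimal.quantize, which defaults to banker's rounding:

print(format_num('3.14159265358979', 4))  # 3.1416
print(format_num('2.5000'))               # 2.5  (trailing zeros stripped)
print(format_num(1e-12, 10))              # 0    (rounds away below precision)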
rot
(message, shift=3)
Employs the Caesar Cipher to encode/decode messages.
Employs the Caesar Cipher to encode/decode messages.
def rot(message, shift=3): "Employs the Caesar Cipher to encode/decode messages." alphabet = ascii_lowercase shifted_alphabet = alphabet[shift:] + alphabet[:shift] table = maketrans(alphabet, shifted_alphabet) return message.lower().translate(table)
[ "def", "rot", "(", "message", ",", "shift", "=", "3", ")", ":", "alphabet", "=", "ascii_lowercase", "shifted_alphabet", "=", "alphabet", "[", "shift", ":", "]", "+", "alphabet", "[", ":", "shift", "]", "table", "=", "maketrans", "(", "alphabet", ",", "shifted_alphabet", ")", "return", "message", ".", "lower", "(", ")", ".", "translate", "(", "table", ")" ]
[ 40, 0 ]
[ 45, 43 ]
python
en
['en', 'en', 'en']
True
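A round-trip sketch; the record omits imports, so in Python 3 the function's free names are assumed to come from the string module and str.maketrans:

from string import ascii_lowercase
maketrans = str.maketrans

secret = rot('attack at dawn')      # 'dwwdfn dw gdzq'
print(rot(secret, shift=26 - 3))    # 'attack at dawn' (shifting back decodes)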