desc
stringlengths 3
26.7k
| decl
stringlengths 11
7.89k
| bodies
stringlengths 8
553k
|
---|---|---|
'a // b'
def __rfloordiv__(b, a):
    """a // b, with the Fraction on the right-hand side."""
    quotient = a / b
    if not isinstance(quotient, Rational):
        # True division gave a float/complex; floor it directly.
        return math.floor(quotient)
    return quotient.numerator // quotient.denominator
|
'a % b'
def __mod__(a, b):
    """a % b, defined via floor division: a - b * (a // b)."""
    quotient = a // b
    return a - b * quotient
|
'a % b'
def __rmod__(b, a):
    """a % b, with the Fraction on the right-hand side."""
    quotient = a // b
    return a - b * quotient
|
'a ** b
If b is not an integer, the result will be a float or complex
since roots are generally irrational. If b is an integer, the
result will be rational.'
def __pow__(a, b):
    """a ** b.

    If b is not an integer, the result will be a float or complex
    since roots are generally irrational.  If b is an integer, the
    result is exact and rational.
    """
    if not isinstance(b, Rational):
        return float(a) ** b
    if b.denominator != 1:
        # Fractional exponent: fall back to float arithmetic.
        return float(a) ** float(b)
    power = b.numerator
    if power >= 0:
        return Fraction(a._numerator ** power, a._denominator ** power)
    # Negative integer exponent: invert the base first.
    return Fraction(a._denominator ** -power, a._numerator ** -power)
|
'a ** b'
def __rpow__(b, a):
    """a ** b, with the Fraction b as exponent."""
    if b._denominator == 1 and b._numerator >= 0:
        # Non-negative integer exponent: let the base's __pow__ handle it.
        return a ** b._numerator
    if isinstance(a, Rational):
        # Promote the base to Fraction and reuse Fraction.__pow__.
        return Fraction(a.numerator, a.denominator) ** b
    if b._denominator == 1:
        # Negative integer exponent with a non-rational base.
        return a ** b._numerator
    return a ** float(b)
|
'+a: Coerces a subclass instance to Fraction'
def __pos__(a):
    """+a: coerces a subclass instance to an exact Fraction."""
    return Fraction(a._numerator, a._denominator)
|
'-a'
def __neg__(a):
    """-a: negate the numerator, keeping the denominator positive."""
    return Fraction(-a._numerator, a._denominator)
|
'abs(a)'
def __abs__(a):
    """abs(a): absolute value via the numerator's magnitude."""
    return Fraction(abs(a._numerator), a._denominator)
|
'trunc(a)'
def __trunc__(a):
    """trunc(a): round toward zero."""
    n, d = a._numerator, a._denominator
    if n < 0:
        # Floor-divide the magnitude, then negate, so we truncate toward 0.
        return -(-n // d)
    return n // d
|
'hash(self)
Tricky because values that are exactly representable as a
float must have the same hash as that float.'
def __hash__(self):
    """hash(self).

    Tricky because values that are exactly representable as a float
    must have the same hash as that float.
    """
    if self._denominator == 1:
        # Integral value: must hash like the corresponding int.
        return hash(self._numerator)
    as_float = float(self)
    if self == as_float:
        return hash(as_float)
    return hash((self._numerator, self._denominator))
|
'a == b'
def __eq__(a, b):
    """a == b, against any Rational, real-valued Complex, or float."""
    if isinstance(b, Rational):
        return (a._numerator == b.numerator
                and a._denominator == b.denominator)
    if isinstance(b, numbers.Complex) and b.imag == 0:
        # A complex with zero imaginary part compares via its real part.
        b = b.real
    if not isinstance(b, float):
        return NotImplemented
    if math.isnan(b) or math.isinf(b):
        # A Fraction is always finite, so compare like 0.0 does.
        return 0.0 == b
    return a == a.from_float(b)
|
'Helper for comparison operators, for internal use only.
Implement comparison between a Rational instance `self`, and
either another Rational instance or a float `other`. If
`other` is not a Rational instance or a float, return
NotImplemented. `op` should be one of the six standard
comparison operators.'
def _richcmp(self, other, op):
    """Helper for the six comparison operators, for internal use only.

    Compares 'self' (a Rational) against a Rational or a float; returns
    NotImplemented for anything else, and refuses to order complex numbers.
    """
    if isinstance(other, Rational):
        # Cross-multiply so no division (and no rounding) is needed.
        return op(self._numerator * other.denominator,
                  self._denominator * other.numerator)
    if isinstance(other, complex):
        raise TypeError('no ordering relation is defined for complex numbers')
    if not isinstance(other, float):
        return NotImplemented
    if math.isnan(other) or math.isinf(other):
        # nan/inf compare the same way against any finite value as vs 0.0.
        return op(0.0, other)
    return op(self, self.from_float(other))
|
'a < b'
def __lt__(a, b):
    """a < b"""
    return a._richcmp(b, operator.lt)
|
'a > b'
def __gt__(a, b):
    """a > b"""
    return a._richcmp(b, operator.gt)
|
'a <= b'
def __le__(a, b):
    """a <= b"""
    return a._richcmp(b, operator.le)
|
'a >= b'
def __ge__(a, b):
    """a >= b"""
    return a._richcmp(b, operator.ge)
|
'a != 0'
def __nonzero__(a):
    """a != 0 (Python 2 truth value)."""
    return bool(a._numerator)
|
'Create directories under ~'
def create_home_path(self):
    """Create config-var directories under ~ (user-install scheme only)."""
    if not self.user:
        return
    home = convert_path(os.path.expanduser('~'))
    for name, path in self.config_vars.iteritems():
        # Only touch paths inside $HOME that do not exist yet.
        if path.startswith(home) and not os.path.isdir(path):
            self.debug_print("os.makedirs('%s', 0700)" % path)
            os.makedirs(path, 448)  # 448 == 0o700
|
'Return true if the current distribution has any Python
modules to install.'
def has_lib(self):
    """Return true if the distribution has any Python modules to install
    (pure Python or extension modules)."""
    dist = self.distribution
    return dist.has_pure_modules() or dist.has_ext_modules()
|
'Deprecated API.'
def check_metadata(self):
    """Deprecated API: delegate the meta-data check to the 'check' command."""
    warn('distutils.command.register.check_metadata is deprecated, use the check command instead', PendingDeprecationWarning)
    checker = self.distribution.get_command_obj('check')
    checker.ensure_finalized()
    # Propagate our strictness; always validate reST.
    checker.strict = self.strict
    checker.restructuredtext = 1
    checker.run()
|
'Reads the configuration file and set attributes.'
def _set_config(self):
    """Read the .pypirc configuration file and set attributes."""
    config = self._read_pypirc()
    if config == {}:
        # No .pypirc: only the default repository (or its alias) works.
        if self.repository not in ('pypi', self.DEFAULT_REPOSITORY):
            raise ValueError('%s not found in .pypirc' % self.repository)
        if self.repository == 'pypi':
            self.repository = self.DEFAULT_REPOSITORY
        self.has_config = False
    else:
        self.username = config['username']
        self.password = config['password']
        self.repository = config['repository']
        self.realm = config['realm']
        self.has_config = True
|
'Fetch the list of classifiers from the server.'
def classifiers(self):
    """Fetch the list of classifiers from the server and log it."""
    url = self.repository + '?:action=list_classifiers'
    response = urllib2.urlopen(url)
    log.info(response.read())
|
'Send the metadata to the package index server to be checked.'
def verify_metadata(self):
    """Send the metadata to the package index server to be checked."""
    payload = self.build_post_data('verify')
    code, result = self.post_to_server(payload)
    log.info('Server response (%s): %s' % (code, result))
|
'Send the metadata to the package index server.
Well, do the following:
1. figure who the user is, and then
2. send the data as a Basic auth\'ed POST.
First we try to read the username/password from $HOME/.pypirc,
which is a ConfigParser-formatted file with a section
[distutils] containing username and password entries (both
in clear text). Eg:
[distutils]
index-servers =
pypi
[pypi]
username: fred
password: sekrit
Otherwise, to figure who the user is, we offer the user three
choices:
1. use existing login,
2. register as a new user, or
3. set the password to a random string and email the user.'
| def send_metadata(self):
| if self.has_config:
# Credentials came from .pypirc: go straight to "use existing login".
choice = '1'
username = self.username
password = self.password
else:
choice = 'x'
username = password = ''
# Keep prompting until the user picks one of the four menu entries.
choices = '1 2 3 4'.split()
while (choice not in choices):
self.announce('We need to know who you are, so please choose either:\n 1. use your existing login,\n 2. register as a new user,\n 3. have the server generate a new password for you (and email it to you), or\n 4. quit\nYour selection [default 1]: ', log.INFO)
choice = raw_input()
if (not choice):
choice = '1'
elif (choice not in choices):
print 'Please choose one of the four options!'
if (choice == '1'):
# Existing login: collect any missing credentials, then submit.
while (not username):
username = raw_input('Username: ')
while (not password):
password = getpass.getpass('Password: ')
auth = urllib2.HTTPPasswordMgr()
host = urlparse.urlparse(self.repository)[1]
auth.add_password(self.realm, host, username, password)
(code, result) = self.post_to_server(self.build_post_data('submit'), auth)
self.announce(('Server response (%s): %s' % (code, result)), log.INFO)
if (code == 200):
if self.has_config:
# Remember the password for later commands in this run.
self.distribution.password = password
else:
# Offer to save the login in .pypirc for future runs.
self.announce('I can store your PyPI login so future submissions will be faster.', log.INFO)
self.announce(('(the login will be stored in %s)' % self._get_rc_file()), log.INFO)
choice = 'X'
while (choice.lower() not in 'yn'):
choice = raw_input('Save your login (y/N)?')
if (not choice):
choice = 'n'
if (choice.lower() == 'y'):
self._store_pypirc(username, password)
elif (choice == '2'):
# New-user registration: gather name/password/email interactively.
data = {':action': 'user'}
data['name'] = data['password'] = data['email'] = ''
data['confirm'] = None
while (not data['name']):
data['name'] = raw_input('Username: ')
while (data['password'] != data['confirm']):
while (not data['password']):
data['password'] = getpass.getpass('Password: ')
while (not data['confirm']):
data['confirm'] = getpass.getpass(' Confirm: ')
if (data['password'] != data['confirm']):
# Mismatch: reset both and ask again.
data['password'] = ''
data['confirm'] = None
print "Password and confirm don't match!"
while (not data['email']):
data['email'] = raw_input(' EMail: ')
(code, result) = self.post_to_server(data)
if (code != 200):
log.info(('Server response (%s): %s' % (code, result)))
else:
log.info('You will receive an email shortly.')
log.info('Follow the instructions in it to complete registration.')
elif (choice == '3'):
# Password reset: only an email address is needed.
data = {':action': 'password_reset'}
data['email'] = ''
while (not data['email']):
data['email'] = raw_input('Your email address: ')
(code, result) = self.post_to_server(data)
log.info(('Server response (%s): %s' % (code, result)))
|
'Post a query to the server, and return a string response.'
| def post_to_server(self, data, auth=None):
| if ('name' in data):
self.announce(('Registering %s to %s' % (data['name'], self.repository)), log.INFO)
# Build the multipart/form-data request body by hand.
boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
sep_boundary = ('\n--' + boundary)
end_boundary = (sep_boundary + '--')
body = StringIO.StringIO()
for (key, value) in data.items():
# A value may be a list/tuple of repeated form fields.
if (type(value) not in (type([]), type(()))):
value = [value]
for value in value:
body.write(sep_boundary)
body.write(('\nContent-Disposition: form-data; name="%s"' % key))
body.write('\n\n')
body.write(value)
if (value and (value[(-1)] == '\r')):
# Keep a trailing CR from swallowing the boundary's newline.
body.write('\n')
body.write(end_boundary)
body.write('\n')
body = body.getvalue()
headers = {'Content-type': ('multipart/form-data; boundary=%s; charset=utf-8' % boundary), 'Content-length': str(len(body))}
req = urllib2.Request(self.repository, body, headers)
# Send the request; normalize every outcome to a (code, message) pair.
opener = urllib2.build_opener(urllib2.HTTPBasicAuthHandler(password_mgr=auth))
data = ''
try:
result = opener.open(req)
except urllib2.HTTPError as e:
if self.show_response:
data = e.fp.read()
result = (e.code, e.msg)
except urllib2.URLError as e:
result = (500, str(e))
else:
if self.show_response:
data = result.read()
result = (200, 'OK')
if self.show_response:
# Dump the raw server response between dashed separators.
dashes = ('-' * 75)
self.announce(('%s%s%s' % (dashes, data, dashes)))
return result
|
'Generate list of \'(package,src_dir,build_dir,filenames)\' tuples'
def get_data_files(self):
    """Generate list of '(package, src_dir, build_dir, filenames)' tuples."""
    results = []
    if not self.packages:
        return results
    for package in self.packages:
        src_dir = self.get_package_dir(package)
        build_dir = os.path.join(self.build_lib, *package.split('.'))
        # Strip the source-dir prefix (plus path separator) from each file.
        prefix_len = len(src_dir) + 1 if src_dir else 0
        filenames = [fname[prefix_len:]
                     for fname in self.find_data_files(package, src_dir)]
        results.append((package, src_dir, build_dir, filenames))
    return results
|
'Return filenames for package\'s data files in \'src_dir\''
def find_data_files(self, package, src_dir):
    """Return filenames for package's data files in 'src_dir'."""
    # Global ('' key) patterns apply to every package.
    patterns = (self.package_data.get('', [])
                + self.package_data.get(package, []))
    matched = []
    for pattern in patterns:
        batch = glob(os.path.join(src_dir, convert_path(pattern)))
        # Keep only files not already collected by an earlier pattern.
        matched.extend([fn for fn in batch if fn not in matched])
    return matched
|
'Copy data files into build directory'
def build_package_data(self):
    """Copy each package's data files into the build directory."""
    for package, src_dir, build_dir, filenames in self.data_files:
        for filename in filenames:
            target = os.path.join(build_dir, filename)
            # Create the target directory, then copy without preserving
            # the source file's mode bits.
            self.mkpath(os.path.dirname(target))
            self.copy_file(os.path.join(src_dir, filename), target,
                           preserve_mode=False)
|
'Return the directory, relative to the top of the source
distribution, where package \'package\' should be found
(at least according to the \'package_dir\' option, if any).'
def get_package_dir(self, package):
    """Return the directory, relative to the top of the source
    distribution, where 'package' should be found, honouring the
    'package_dir' option if set."""
    parts = package.split('.')
    if not self.package_dir:
        # No mapping: the package path simply mirrors the dotted name.
        return os.path.join(*parts) if parts else ''
    tail = []
    while parts:
        try:
            pdir = self.package_dir['.'.join(parts)]
        except KeyError:
            # No entry for this prefix: peel off the last component
            # and retry with the shorter dotted name.
            tail.insert(0, parts[-1])
            del parts[-1]
        else:
            tail.insert(0, pdir)
            return os.path.join(*tail)
    # Nothing matched; fall back to the root entry '' if present.
    pdir = self.package_dir.get('')
    if pdir is not None:
        tail.insert(0, pdir)
    return os.path.join(*tail) if tail else ''
|
'Finds individually-specified Python modules, ie. those listed by
module name in \'self.py_modules\'. Returns a list of tuples (package,
module_base, filename): \'package\' is a tuple of the path through
package-space to the module; \'module_base\' is the bare (no
packages, no dots) module name, and \'filename\' is the path to the
".py" file (relative to the distribution root) that implements the
module.'
def find_modules(self):
    """Find individually-specified Python modules from 'self.py_modules'.

    Returns a list of (package, module_base, filename) tuples: 'package'
    is the dotted path through package-space, 'module_base' the bare
    module name, and 'filename' the path to its .py file.
    """
    # package name -> (package_dir, already-validated flag)
    seen_packages = {}
    found = []
    for module in self.py_modules:
        dotted = module.split('.')
        package = '.'.join(dotted[:-1])
        module_base = dotted[-1]
        try:
            package_dir, checked = seen_packages[package]
        except KeyError:
            package_dir = self.get_package_dir(package)
            checked = 0
        if not checked:
            # Validate the package only once; record its __init__.py too.
            init_py = self.check_package(package, package_dir)
            seen_packages[package] = (package_dir, 1)
            if init_py:
                found.append((package, '__init__', init_py))
        module_file = os.path.join(package_dir, module_base + '.py')
        if not self.check_module(module, module_file):
            continue
        found.append((package, module_base, module_file))
    return found
|
'Compute the list of all modules that will be built, whether
they are specified one-module-at-a-time (\'self.py_modules\') or
by whole packages (\'self.packages\'). Return a list of tuples
(package, module, module_file), just like \'find_modules()\' and
\'find_package_modules()\' do.'
def find_all_modules(self):
    """Compute the list of all modules that will be built, whether
    listed one at a time ('self.py_modules') or by whole packages
    ('self.packages').  Returns (package, module, module_file) tuples
    just like 'find_modules()' and 'find_package_modules()'."""
    modules = []
    if self.py_modules:
        modules.extend(self.find_modules())
    for package in (self.packages or []):
        package_dir = self.get_package_dir(package)
        modules.extend(self.find_package_modules(package, package_dir))
    return modules
|
'Check that \'self.compiler\' really is a CCompiler object;
if not, make it one.'
def _check_compiler(self):
    """Ensure 'self.compiler' really is a CCompiler instance, creating
    and customizing one if necessary."""
    from distutils.ccompiler import CCompiler, new_compiler
    if not isinstance(self.compiler, CCompiler):
        self.compiler = new_compiler(compiler=self.compiler,
                                     dry_run=self.dry_run, force=1)
        customize_compiler(self.compiler)
    # Push any user-supplied search paths down into the compiler object.
    if self.include_dirs:
        self.compiler.set_include_dirs(self.include_dirs)
    if self.libraries:
        self.compiler.set_libraries(self.libraries)
    if self.library_dirs:
        self.compiler.set_library_dirs(self.library_dirs)
|
'Construct a source file from \'body\' (a string containing lines
of C/C++ code) and \'headers\' (a list of header files to include)
and run it through the preprocessor. Return true if the
preprocessor succeeded, false if there were any errors.
(\'body\' probably isn\'t of much use, but what the heck.)'
def try_cpp(self, body=None, headers=None, include_dirs=None, lang='c'):
    """Construct a source file from 'body' and 'headers' and run it
    through the preprocessor.  Return true if the preprocessor
    succeeded, false if there were any errors."""
    from distutils.ccompiler import CompileError
    self._check_compiler()
    try:
        self._preprocess(body, headers, include_dirs, lang)
        succeeded = 1
    except CompileError:
        succeeded = 0
    # Temporary files are removed whether or not preprocessing worked.
    self._clean()
    return succeeded
|
'Construct a source file (just like \'try_cpp()\'), run it through
the preprocessor, and return true if any line of the output matches
\'pattern\'. \'pattern\' should either be a compiled regex object or a
string containing a regex. If both \'body\' and \'headers\' are None,
preprocesses an empty file -- which can be useful to determine the
symbols the preprocessor and compiler set by default.'
def search_cpp(self, pattern, body=None, headers=None, include_dirs=None, lang='c'):
    """Preprocess the constructed source file and return true if any
    line of the output matches 'pattern' (a regex string or a compiled
    regex object)."""
    self._check_compiler()
    src, out = self._preprocess(body, headers, include_dirs, lang)
    if isinstance(pattern, str):
        pattern = re.compile(pattern)
    out_file = open(out)
    match = 0
    # Scan line by line; stop at the first hit.
    for line in iter(out_file.readline, ''):
        if pattern.search(line):
            match = 1
            break
    out_file.close()
    self._clean()
    return match
|
'Try to compile a source file built from \'body\' and \'headers\'.
Return true on success, false otherwise.'
def try_compile(self, body, headers=None, include_dirs=None, lang='c'):
    """Try to compile a source file built from 'body' and 'headers'.
    Return true on success, false otherwise."""
    from distutils.ccompiler import CompileError
    self._check_compiler()
    try:
        self._compile(body, headers, include_dirs, lang)
        ok = 1
    except CompileError:
        ok = 0
    log.info(ok and 'success!' or 'failure.')
    self._clean()
    return ok
|
'Try to compile and link a source file, built from \'body\' and
\'headers\', to executable form. Return true on success, false
otherwise.'
def try_link(self, body, headers=None, include_dirs=None, libraries=None, library_dirs=None, lang='c'):
    """Try to compile and link a source file, built from 'body' and
    'headers', to executable form.  Return true on success."""
    from distutils.ccompiler import CompileError, LinkError
    self._check_compiler()
    try:
        self._link(body, headers, include_dirs, libraries, library_dirs, lang)
        ok = 1
    except (CompileError, LinkError):
        ok = 0
    log.info(ok and 'success!' or 'failure.')
    self._clean()
    return ok
|
'Try to compile, link to an executable, and run a program
built from \'body\' and \'headers\'. Return true on success, false
otherwise.'
def try_run(self, body, headers=None, include_dirs=None, libraries=None, library_dirs=None, lang='c'):
    """Try to compile, link to an executable, and run a program built
    from 'body' and 'headers'.  Return true on success."""
    from distutils.ccompiler import CompileError, LinkError
    self._check_compiler()
    try:
        src, obj, exe = self._link(body, headers, include_dirs,
                                   libraries, library_dirs, lang)
        self.spawn([exe])
        ok = 1
    except (CompileError, LinkError, DistutilsExecError):
        # Failure at any stage (compile, link, or execution) counts.
        ok = 0
    log.info(ok and 'success!' or 'failure.')
    self._clean()
    return ok
|
'Determine if function \'func\' is available by constructing a
source file that refers to \'func\', and compiles and links it.
If everything succeeds, returns true; otherwise returns false.
The constructed source file starts out by including the header
files listed in \'headers\'. If \'decl\' is true, it then declares
\'func\' (as "int func()"); you probably shouldn\'t supply \'headers\'
and set \'decl\' true in the same call, or you might get errors about
a conflicting declarations for \'func\'. Finally, the constructed
\'main()\' function either references \'func\' or (if \'call\' is true)
calls it. \'libraries\' and \'library_dirs\' are used when
linking.'
def check_func(self, func, headers=None, include_dirs=None, libraries=None, library_dirs=None, decl=0, call=0):
    """Determine if function 'func' is available, by building a probe
    program that references (or, with 'call' true, invokes) it and
    trying to link it.  With 'decl' true the probe also declares 'func'
    as "int func ()"; 'libraries' and 'library_dirs' feed the link."""
    self._check_compiler()
    lines = []
    if decl:
        lines.append('int %s ();' % func)
    lines.append('int main () {')
    # Either a bare reference or an actual call, per the 'call' flag.
    if call:
        lines.append(' %s();' % func)
    else:
        lines.append(' %s;' % func)
    lines.append('}')
    probe = '\n'.join(lines) + '\n'
    return self.try_link(probe, headers, include_dirs, libraries, library_dirs)
|
'Determine if \'library\' is available to be linked against,
without actually checking that any particular symbols are provided
by it. \'headers\' will be used in constructing the source file to
be compiled, but the only effect of this is to check if all the
header files listed are available. Any libraries listed in
\'other_libraries\' will be included in the link, in case \'library\'
has symbols that depend on other libraries.'
def check_lib(self, library, library_dirs=None, headers=None, include_dirs=None, other_libraries=[]):
    """Determine if 'library' is available to be linked against, without
    checking for any particular symbols.  'headers' only verifies that
    the listed header files exist; 'other_libraries' are added to the
    link in case 'library' depends on them.
    NOTE: the mutable default for 'other_libraries' is kept for interface
    compatibility; it is never mutated here."""
    self._check_compiler()
    return self.try_link('int main (void) { }', headers, include_dirs,
                         [library] + other_libraries, library_dirs)
|
'Determine if the system header file named by \'header_file\'
exists and can be found by the preprocessor; return true if so,
false otherwise.'
def check_header(self, header, include_dirs=None, library_dirs=None, lang='c'):
    """Return true if the system header named by 'header' exists and can
    be found by the preprocessor.  ('library_dirs' and 'lang' are
    accepted for interface compatibility but unused, as before.)"""
    return self.try_cpp(body='/* No body */', headers=[header],
                        include_dirs=include_dirs)
|
'Sets default values for options.'
def initialize_options(self):
    """Set default values for the check command's options."""
    self.restructuredtext = 0  # do not validate reST unless asked
    self.metadata = 1          # check the meta-data by default
    self.strict = 0            # warnings are not fatal by default
    self._warnings = 0         # running count of emitted warnings
|
'Counts the number of warnings that occurs.'
def warn(self, msg):
    """Count this warning, then defer to the base Command.warn."""
    self._warnings += 1
    return Command.warn(self, msg)
|
'Runs the command.'
def run(self):
    """Run the selected checks; in strict mode raise DistutilsSetupError
    if docutils is required but missing, or if any warnings occurred."""
    if self.metadata:
        self.check_metadata()
    if self.restructuredtext:
        if HAS_DOCUTILS:
            self.check_restructuredtext()
        elif self.strict:
            raise DistutilsSetupError('The docutils package is needed.')
    if self.strict and self._warnings > 0:
        raise DistutilsSetupError('Please correct your package.')
|
'Ensures that all required elements of meta-data are supplied.
name, version, URL, (author and author_email) or
(maintainer and maintainer_email)).
Warns if any are missing.'
def check_metadata(self):
    """Ensure all required meta-data is supplied: name, version, URL,
    and (author + author_email) or (maintainer + maintainer_email).
    Warns about anything missing."""
    metadata = self.distribution.metadata
    missing = [attr for attr in ('name', 'version', 'url')
               if not (hasattr(metadata, attr) and getattr(metadata, attr))]
    if missing:
        self.warn('missing required meta-data: %s' % ', '.join(missing))
    if metadata.author:
        if not metadata.author_email:
            self.warn("missing meta-data: if 'author' supplied, "
                      "'author_email' must be supplied too")
    elif metadata.maintainer:
        if not metadata.maintainer_email:
            self.warn("missing meta-data: if 'maintainer' supplied, "
                      "'maintainer_email' must be supplied too")
    else:
        # Neither contact was given at all.
        self.warn('missing meta-data: either (author and author_email) '
                  'or (maintainer and maintainer_email) '
                  'must be supplied')
|
'Checks if the long string fields are reST-compliant.'
def check_restructuredtext(self):
    """Check that the long description is reST-compliant; emit one
    warning per problem reported by docutils."""
    data = self.distribution.get_long_description()
    for warning in self._check_rst_data(data):
        # The last tuple element is a dict that may carry a line number.
        lineno = warning[-1].get('line')
        if lineno is None:
            text = warning[1]
        else:
            text = '%s (line %s)' % (warning[1], lineno)
        self.warn(text)
|
'Returns warnings when the provided data doesn\'t compile.'
def _check_rst_data(self, data):
    """Parse 'data' as reST and return the warning messages collected
    by a SilentReporter (empty when the data compiles cleanly)."""
    source_path = StringIO()
    parser = Parser()
    settings = frontend.OptionParser().get_default_values()
    settings.tab_width = 4
    settings.pep_references = None
    settings.rfc_references = None
    reporter = SilentReporter(source_path, settings.report_level,
                              settings.halt_level,
                              stream=settings.warning_stream,
                              debug=settings.debug,
                              encoding=settings.error_encoding,
                              error_handler=settings.error_encoding_error_handler)
    document = nodes.document(settings, reporter, source=source_path)
    document.note_source(source_path, -1)
    try:
        parser.parse(data, document)
    except AttributeError:
        # Some docutils versions choke on malformed input; record it.
        reporter.messages.append((-1, 'Could not finish the parsing.', '', {}))
    return reporter.messages
|
'Generate the text of an RPM spec file and return it as a
list of strings (one per line).'
| def _make_spec_file(self):
| spec_file = [('%define name ' + self.distribution.get_name()), ('%define version ' + self.distribution.get_version().replace('-', '_')), ('%define unmangled_version ' + self.distribution.get_version()), ('%define release ' + self.release.replace('-', '_')), '', ('Summary: ' + self.distribution.get_description())]
# RPM versions/releases may not contain '-', hence the '_' substitution.
spec_file.extend(['Name: %{name}', 'Version: %{version}', 'Release: %{release}'])
if self.use_bzip2:
spec_file.append('Source0: %{name}-%{unmangled_version}.tar.bz2')
else:
spec_file.append('Source0: %{name}-%{unmangled_version}.tar.gz')
spec_file.extend([('License: ' + self.distribution.get_license()), ('Group: ' + self.group), 'BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-buildroot', 'Prefix: %{_prefix}'])
if (not self.force_arch):
# noarch when there are no extension modules; otherwise rpm decides.
if (not self.distribution.has_ext_modules()):
spec_file.append('BuildArch: noarch')
else:
spec_file.append(('BuildArch: %s' % self.force_arch))
for field in ('Vendor', 'Packager', 'Provides', 'Requires', 'Conflicts', 'Obsoletes'):
val = getattr(self, string.lower(field))
if isinstance(val, list):
spec_file.append(('%s: %s' % (field, string.join(val))))
elif (val is not None):
spec_file.append(('%s: %s' % (field, val)))
if (self.distribution.get_url() != 'UNKNOWN'):
spec_file.append(('Url: ' + self.distribution.get_url()))
if self.distribution_name:
spec_file.append(('Distribution: ' + self.distribution_name))
if self.build_requires:
spec_file.append(('BuildRequires: ' + string.join(self.build_requires)))
if self.icon:
spec_file.append(('Icon: ' + os.path.basename(self.icon)))
if self.no_autoreq:
spec_file.append('AutoReq: 0')
spec_file.extend(['', '%description', self.distribution.get_long_description()])
# Default build/install scripts re-run this setup script under rpm.
def_setup_call = ('%s %s' % (self.python, os.path.basename(sys.argv[0])))
def_build = ('%s build' % def_setup_call)
if self.use_rpm_opt_flags:
def_build = ('env CFLAGS="$RPM_OPT_FLAGS" ' + def_build)
install_cmd = ('%s install -O1 --root=$RPM_BUILD_ROOT --record=INSTALLED_FILES' % def_setup_call)
script_options = [('prep', 'prep_script', '%setup -n %{name}-%{unmangled_version}'), ('build', 'build_script', def_build), ('install', 'install_script', install_cmd), ('clean', 'clean_script', 'rm -rf $RPM_BUILD_ROOT'), ('verifyscript', 'verify_script', None), ('pre', 'pre_install', None), ('post', 'post_install', None), ('preun', 'pre_uninstall', None), ('postun', 'post_uninstall', None)]
for (rpm_opt, attr, default) in script_options:
# Use the user-supplied script file when given, else the default text.
val = getattr(self, attr)
if (val or default):
spec_file.extend(['', ('%' + rpm_opt)])
if val:
spec_file.extend(string.split(open(val, 'r').read(), '\n'))
else:
spec_file.append(default)
spec_file.extend(['', '%files -f INSTALLED_FILES', '%defattr(-,root,root)'])
if self.doc_files:
spec_file.append(('%doc ' + string.join(self.doc_files)))
if self.changelog:
spec_file.extend(['', '%changelog'])
spec_file.extend(self.changelog)
return spec_file
|
'Format the changelog correctly and convert it to a list of strings'
def _format_changelog(self, changelog):
    """Format the changelog correctly and convert it to a list of strings:
    a blank line before each '*' entry, '-' items kept as-is, and other
    lines treated as indented continuations."""
    if not changelog:
        return changelog
    formatted = []
    for line in string.split(string.strip(changelog), '\n'):
        line = string.strip(line)
        if line[0] == '*':
            formatted.extend(['', line])
        elif line[0] == '-':
            formatted.append(line)
        else:
            formatted.append(' ' + line)
    if not formatted[0]:
        # Drop the blank inserted before the very first '*' entry.
        del formatted[0]
    return formatted
|
'Ensure that the list of extensions (presumably provided as a
command option \'extensions\') is valid, i.e. it is a list of
Extension objects. We also support the old-style list of 2-tuples,
where the tuples are (ext_name, build_info), which are converted to
Extension instances here.
Raise DistutilsSetupError if the structure is invalid anywhere;
just returns otherwise.'
def check_extensions_list(self, extensions):
    """Ensure 'extensions' (the 'ext_modules' command option) is a list
    of Extension instances.

    Old-style (ext_name, build_info) 2-tuples are still supported and
    converted to Extension instances in place.  Raises
    DistutilsSetupError if the structure is invalid anywhere; just
    returns otherwise.
    """
    # NOTE: the exceptions below use the 'raise E(msg)' call form instead
    # of the Python-2-only 'raise E, msg' statement: same behavior, but
    # also valid Python 3 syntax.
    if not isinstance(extensions, list):
        raise DistutilsSetupError(
            "'ext_modules' option must be a list of Extension instances")
    for i, ext in enumerate(extensions):
        if isinstance(ext, Extension):
            continue  # already in the right form
        if not isinstance(ext, tuple) or len(ext) != 2:
            raise DistutilsSetupError(
                "each element of 'ext_modules' option must be an Extension instance or 2-tuple")
        ext_name, build_info = ext
        log.warn(("old-style (ext_name, build_info) tuple found in ext_modules for extension '%s'-- please convert to Extension instance" % ext_name))
        if not (isinstance(ext_name, str) and extension_name_re.match(ext_name)):
            raise DistutilsSetupError(
                "first element of each tuple in 'ext_modules' must be the extension name (a string)")
        if not isinstance(build_info, dict):
            raise DistutilsSetupError(
                "second element of each tuple in 'ext_modules' must be a dictionary (build info)")
        # Convert the tuple to a real Extension, copying known options.
        ext = Extension(ext_name, build_info['sources'])
        for key in ('include_dirs', 'library_dirs', 'libraries',
                    'extra_objects', 'extra_compile_args', 'extra_link_args'):
            val = build_info.get(key)
            if val is not None:
                setattr(ext, key, val)
        ext.runtime_library_dirs = build_info.get('rpath')
        if 'def_file' in build_info:
            log.warn("'def_file' element of build info dict no longer supported")
        macros = build_info.get('macros')
        if macros:
            ext.define_macros = []
            ext.undef_macros = []
            for macro in macros:
                # 1-tuple undefines a macro; 2-tuple defines one.
                if not (isinstance(macro, tuple) and len(macro) in (1, 2)):
                    raise DistutilsSetupError(
                        "'macros' element of build info dict must be 1- or 2-tuple")
                if len(macro) == 1:
                    ext.undef_macros.append(macro[0])
                else:
                    ext.define_macros.append(macro)
        extensions[i] = ext
|
'Walk the list of source files in \'sources\', looking for SWIG
interface (.i) files. Run SWIG on all that are found, and
return a modified \'sources\' list with SWIG source files replaced
by the generated C (or C++) files.'
def swig_sources(self, sources, extension):
    """Walk 'sources' looking for SWIG interface (.i) files; run SWIG on
    each one found and return a modified sources list with the .i files
    replaced by the generated C (or C++) wrappers."""
    new_sources = []
    to_swig = []
    wrapper_for = {}
    if self.swig_cpp:
        log.warn('--swig-cpp is deprecated - use --swig-opts=-c++')
    cpp_mode = (self.swig_cpp or '-c++' in self.swig_opts
                or '-c++' in extension.swig_opts)
    target_ext = '.cpp' if cpp_mode else '.c'
    for source in sources:
        base, ext = os.path.splitext(source)
        if ext == '.i':
            # SWIG interface file: substitute its generated wrapper.
            wrapper = base + '_wrap' + target_ext
            new_sources.append(wrapper)
            to_swig.append(source)
            wrapper_for[source] = wrapper
        else:
            new_sources.append(source)
    if not to_swig:
        return new_sources
    swig = self.swig or self.find_swig()
    swig_cmd = [swig, '-python']
    swig_cmd.extend(self.swig_opts)
    if self.swig_cpp:
        swig_cmd.append('-c++')
    # Per-extension options apply only when no global --swig-opts given.
    if not self.swig_opts:
        for o in extension.swig_opts:
            swig_cmd.append(o)
    for source in to_swig:
        target = wrapper_for[source]
        log.info('swigging %s to %s', source, target)
        self.spawn(swig_cmd + ['-o', target, source])
    return new_sources
|
'Return the name of the SWIG executable. On Unix, this is
just "swig" -- it should be in the PATH. Tries a bit harder on
Windows.'
def find_swig(self):
    """Return the name of the SWIG executable.

    On Unix, this is just "swig" -- it should be on the PATH.  On
    Windows, the conventional install directories are probed before
    falling back to the PATH; OS/2 uses "swig.exe".  Raises
    DistutilsPlatformError on unknown platforms.
    """
    if os.name == 'posix':
        return 'swig'
    if os.name == 'nt':
        # Probe the standard SWIG installation directories, newest first.
        for vers in ('1.3', '1.2', '1.1'):
            fn = os.path.join('c:\\swig%s' % vers, 'swig.exe')
            if os.path.isfile(fn):
                return fn
        # Not found in a standard location: hope it is on the PATH.
        return 'swig.exe'
    if os.name == 'os2':
        return 'swig.exe'
    # NOTE: 'raise E(msg)' call form replaces the Python-2-only
    # 'raise E, msg' statement; identical behavior, valid Python 3 syntax.
    raise DistutilsPlatformError(
        "I don't know how to find (much less run) SWIG on platform '%s'" % os.name)
|
'Returns the path of the filename for a given extension.
The file is located in `build_lib` or directly in the package
(inplace option).'
def get_ext_fullpath(self, ext_name):
    """Return the path of the output file for extension 'ext_name':
    under 'build_lib' normally, or directly inside the package for
    inplace builds."""
    # Map '/' and the platform separator to dots (Python 2 string module).
    all_dots = string.maketrans('/' + os.sep, '..')
    ext_name = ext_name.translate(all_dots)
    fullname = self.get_ext_fullname(ext_name)
    modpath = fullname.split('.')
    filename = os.path.split(self.get_ext_filename(ext_name))[-1]
    if not self.inplace:
        # Regular build: place the file under build_lib in the package tree.
        return os.path.join(self.build_lib, *(modpath[:-1] + [filename]))
    # Inplace build: drop the file next to the package's source.
    package = '.'.join(modpath[:-1])
    build_py = self.get_finalized_command('build_py')
    package_dir = os.path.abspath(build_py.get_package_dir(package))
    return os.path.join(package_dir, filename)
|
'Returns the fullname of a given extension name.
Adds the `package.` prefix'
def get_ext_fullname(self, ext_name):
    """Return 'ext_name' with the 'self.package.' prefix added when a
    package is configured."""
    if self.package is None:
        return ext_name
    return self.package + '.' + ext_name
|
'Convert the name of an extension (eg. "foo.bar") into the name
of the file from which it will be loaded (eg. "foo/bar.so", or
"foo\bar.pyd").'
def get_ext_filename(self, ext_name):
    """Convert a dotted extension name (eg. "foo.bar") into the filename
    it will be loaded from (eg. "foo/bar.so", or "foo\\bar_d.pyd" for a
    Windows debug build)."""
    from distutils.sysconfig import get_config_var
    ext_path = string.split(ext_name, '.')
    if os.name == 'os2':
        # OS/2 limits the basename to 8 characters.
        ext_path[-1] = ext_path[-1][:8]
    so_ext = get_config_var('SO')
    if os.name == 'nt' and self.debug:
        return os.path.join(*ext_path) + '_d' + so_ext
    return os.path.join(*ext_path) + so_ext
|
'Return the list of symbols that a shared extension has to
export. This either uses \'ext.export_symbols\' or, if it\'s not
provided, "init" + module_name. Only relevant on Windows, where
the .pyd file (DLL) must export the module "init" function.'
def get_export_symbols(self, ext):
    """Return 'ext.export_symbols', making sure it contains the
    "init<module>" symbol that Windows .pyd files must export."""
    initfunc_name = 'init' + ext.name.split('.')[-1]
    if initfunc_name not in ext.export_symbols:
        ext.export_symbols.append(initfunc_name)
    return ext.export_symbols
|
'Return the list of libraries to link against when building a
shared extension. On most platforms, this is just \'ext.libraries\';
on Windows and OS/2, we add the Python library (eg. python20.dll).'
def get_libraries(self, ext):
    """Return the libraries to link a shared extension against.

    On most platforms this is just 'ext.libraries'; on Windows, OS/2,
    cygwin, atheos and shared-libpython Unix builds the appropriate
    pythonXY library is appended.
    """
    major = sys.hexversion >> 24
    minor = (sys.hexversion >> 16) & 255
    if sys.platform == 'win32':
        from distutils.msvccompiler import MSVCCompiler
        if isinstance(self.compiler, MSVCCompiler):
            # MSVC pulls in the import library automatically.
            return ext.libraries
        template = 'python%d%d'
        if self.debug:
            template = template + '_d'
        return ext.libraries + [template % (major, minor)]
    if sys.platform == 'os2emx':
        return ext.libraries + ['python%d%d' % (major, minor)]
    if sys.platform[:6] == 'cygwin':
        return ext.libraries + ['python%d.%d' % (major, minor)]
    if sys.platform[:6] == 'atheos':
        from distutils import sysconfig
        pythonlib = 'python%d.%d' % (major, minor)
        extra = []
        for lib in sysconfig.get_config_var('SHLIBS').split():
            # '-lfoo' entries become bare 'foo'; others pass through.
            if lib.startswith('-l'):
                extra.append(lib[2:])
            else:
                extra.append(lib)
        return ext.libraries + [pythonlib, 'm'] + extra
    if sys.platform == 'darwin' or sys.platform[:3] == 'aix':
        # The shared/framework python is linked implicitly here.
        return ext.libraries
    from distutils import sysconfig
    if sysconfig.get_config_var('Py_ENABLE_SHARED'):
        return ext.libraries + ['python%d.%d' % (major, minor)]
    return ext.libraries
|
def copy_scripts(self):
    """Copy each script listed in 'self.scripts'; if it's marked as a
    Python script in the Unix way (first line matches 'first_line_re',
    ie. starts with "#!" and contains "python"), then adjust the first
    line to refer to the current Python interpreter as we copy.
    """
    # __import__ avoids clashing with distutils' own 'sysconfig' name.
    _sysconfig = __import__('sysconfig')
    self.mkpath(self.build_dir)
    outfiles = []
    for script in self.scripts:
        adjust = 0
        script = convert_path(script)
        outfile = os.path.join(self.build_dir, os.path.basename(script))
        outfiles.append(outfile)
        if ((not self.force) and (not newer(script, outfile))):
            log.debug('not copying %s (up-to-date)', script)
            continue
        # Always open the file, but ignore failures in dry-run mode --
        # that way, we'll get accurate feedback if we can read the
        # script.
        try:
            f = open(script, 'r')
        except IOError:
            if (not self.dry_run):
                raise
            f = None
        else:
            first_line = f.readline()
            if (not first_line):
                # NOTE(review): 'f' is never closed on this path -- the
                # 'continue' skips both close() calls below; confirm
                # this leak is acceptable.
                self.warn(('%s is an empty file (skipping)' % script))
                continue
            match = first_line_re.match(first_line)
            if match:
                adjust = 1
                # match.group(1) is whatever followed the interpreter
                # path on the original shebang line (options etc.).
                post_interp = (match.group(1) or '')
        if adjust:
            log.info('copying and adjusting %s -> %s', script, self.build_dir)
            if (not self.dry_run):
                outf = open(outfile, 'w')
                if (not _sysconfig.is_python_build()):
                    # Installed Python: point at the running interpreter.
                    outf.write(('#!%s%s\n' % (self.executable, post_interp)))
                else:
                    # Uninstalled (build-tree) Python: reconstruct the
                    # interpreter path from the build configuration.
                    outf.write(('#!%s%s\n' % (os.path.join(_sysconfig.get_config_var('BINDIR'), ('python%s%s' % (_sysconfig.get_config_var('VERSION'), _sysconfig.get_config_var('EXE')))), post_interp)))
                outf.writelines(f.readlines())
                outf.close()
            if f:
                f.close()
        else:
            # Not a recognized Python script: copy verbatim.
            if f:
                f.close()
            self.copy_file(script, outfile)
    if (os.name == 'posix'):
        for file in outfiles:
            if self.dry_run:
                log.info('changing mode of %s', file)
            else:
                # 4095 == 0o7777 (permission bits); 365 == 0o555, i.e.
                # ensure read+execute for user, group and other.
                oldmode = (os.stat(file)[ST_MODE] & 4095)
                newmode = ((oldmode | 365) & 4095)
                if (newmode != oldmode):
                    log.info('changing mode of %s from %o to %o', file, oldmode, newmode)
                    os.chmod(file, newmode)
|
def __init__(self, *args, **kw):
    """Dialog(database, name, x, y, w, h, attributes, title, first,
    default, cancel, bitmap=true)
    """
    # NOTE(review): **kw is accepted but silently ignored -- confirm
    # that is intended.
    Dialog.__init__(self, *args)
    # Horizontal rule 36 units above the bottom edge, separating the
    # button row from the body of the dialog.
    ruler = (self.h - 36)
    self.line('BottomLine', 0, ruler, self.w, 0)
|
def title(self, title):
    """Set the title text of the dialog at the top."""
    # 196611 == 0x30003: control attribute flags for the MSI Text
    # control (visible+enabled plus text styles) -- see MSI Control
    # table docs.
    self.text('Title', 15, 10, 320, 60, 196611, '{\\VerdanaBold10}%s' % title)
|
def back(self, title, next, name='Back', active=1):
    """Add a back button with a given title, the tab-next button,
    its name in the Control table, possibly initially disabled.

    Return the button, so that events can be associated.
    """
    # 3 = visible + enabled; 1 = visible only (disabled).
    flags = 3 if active else 1
    return self.pushbutton(name, 180, self.h - 27, 56, 17, flags, title, next)
|
def cancel(self, title, next, name='Cancel', active=1):
    """Add a cancel button with a given title, the tab-next button,
    its name in the Control table, possibly initially disabled.

    Return the button, so that events can be associated.
    """
    # 3 = visible + enabled; 1 = visible only (disabled).
    flags = 3 if active else 1
    return self.pushbutton(name, 304, self.h - 27, 56, 17, flags, title, next)
|
def next(self, title, next, name='Next', active=1):
    """Add a Next button with a given title, the tab-next button,
    its name in the Control table, possibly initially disabled.

    Return the button, so that events can be associated.
    """
    # 3 = visible + enabled; 1 = visible only (disabled).
    flags = 3 if active else 1
    return self.pushbutton(name, 236, self.h - 27, 56, 17, flags, title, next)
|
def xbutton(self, name, title, next, xpos):
    """Add a button with a given title, the tab-next button, its name
    in the Control table, giving its x position as a fraction of the
    dialog width; the y-position is aligned with the other buttons.

    Return the button, so that events can be associated.
    """
    x = int(self.w * xpos - 28)
    return self.pushbutton(name, x, self.h - 27, 56, 17, 3, title, next)
|
def add_find_python(self):
    """Adds code to the installer to compute the location of Python.

    Properties PYTHON.MACHINE.X.Y and PYTHON.USER.X.Y will be set from
    the registry for each version of Python.  Properties TARGETDIRX.Y
    will be set from PYTHON.USER.X.Y if defined, else from
    PYTHON.MACHINE.X.Y.  Properties PYTHONX.Y will be set to
    TARGETDIRX.Y\\python.exe.
    """
    # First sequence number for the generated actions; bumped by 4 per
    # version so each version's action group keeps its relative order.
    start = 402
    for ver in self.versions:
        install_path = ('SOFTWARE\\Python\\PythonCore\\%s\\InstallPath' % ver)
        machine_reg = ('python.machine.' + ver)
        user_reg = ('python.user.' + ver)
        machine_prop = ('PYTHON.MACHINE.' + ver)
        user_prop = ('PYTHON.USER.' + ver)
        machine_action = ('PythonFromMachine' + ver)
        user_action = ('PythonFromUser' + ver)
        exe_action = ('PythonExe' + ver)
        target_dir_prop = ('TARGETDIR' + ver)
        exe_prop = ('PYTHON' + ver)
        if msilib.Win64:
            # 2 = raw registry value lookup; +16 selects the 64-bit
            # registry view (per the MSI RegLocator table docs).
            Type = (2 + 16)
        else:
            Type = 2
        # RegLocator root 2 = HKLM (per-machine), 1 = HKCU (per-user).
        add_data(self.db, 'RegLocator', [(machine_reg, 2, install_path, None, Type), (user_reg, 1, install_path, None, Type)])
        add_data(self.db, 'AppSearch', [(machine_prop, machine_reg), (user_prop, user_reg)])
        # 51 = "set property" custom action; +256 presumably
        # msidbCustomActionTypeFirstSequence -- confirm against MSI docs.
        add_data(self.db, 'CustomAction', [(machine_action, (51 + 256), target_dir_prop, (('[' + machine_prop) + ']')), (user_action, (51 + 256), target_dir_prop, (('[' + user_prop) + ']')), (exe_action, (51 + 256), exe_prop, (('[' + target_dir_prop) + ']\\python.exe'))])
        # Machine value runs first; the user value runs one step later
        # and (when its property is set) overrides it; finally the exe
        # path is derived from the chosen TARGETDIR.
        add_data(self.db, 'InstallExecuteSequence', [(machine_action, machine_prop, start), (user_action, user_prop, (start + 1)), (exe_action, None, (start + 2))])
        add_data(self.db, 'InstallUISequence', [(machine_action, machine_prop, start), (user_action, user_prop, (start + 1)), (exe_action, None, (start + 2))])
        # Feature 'PythonVER' drops to install level 0 (disabled) when
        # no installation of that version was found.
        add_data(self.db, 'Condition', [(('Python' + ver), 0, ('NOT TARGETDIR' + ver))])
        start += 4
    # Keep generated sequence numbers below 500 -- presumably to stay
    # ahead of later standard actions; confirm before adding versions.
    assert (start < 500)
|
def checking_metadata(self):
    """Callable used for the check sub-command.

    Placed here so user_options can view it.
    """
    return self.metadata_check
|
def check_metadata(self):
    """Deprecated API: emit a PendingDeprecationWarning and delegate to
    the 'check' command.
    """
    warn('distutils.command.sdist.check_metadata is deprecated, use the check command instead', PendingDeprecationWarning)
    check = self.distribution.get_command_obj('check')
    check.ensure_finalized()
    check.run()
|
def get_file_list(self):
    """Figure out the list of files to include in the source
    distribution, and put it in 'self.filelist'.

    This might involve reading the manifest template (and writing the
    manifest), or just reading the manifest, or just using the default
    file set -- it all depends on the user's options.
    """
    have_template = os.path.isfile(self.template)
    if not have_template:
        self.warn(("manifest template '%s' does not exist "
                   '(using default file list)') % self.template)
    self.filelist.findall()
    if self.use_defaults:
        self.add_defaults()
    if have_template:
        self.read_template()
    if self.prune:
        self.prune_file_list()
    self.filelist.sort()
    self.filelist.remove_duplicates()
    self.write_manifest()
|
def add_defaults(self):
    """Add all the default files to self.filelist:
      - README or README.txt
      - setup.py
      - test/test*.py
      - all pure Python modules mentioned in setup script
      - all files pointed by package_data (build_py)
      - all files defined in data_files.
      - all files defined as scripts.
      - all C sources listed as part of extensions or C libraries
        in the setup script (doesn't catch C headers!)

    Warns if (README or README.txt) or setup.py are missing; everything
    else is optional.
    """
    # A tuple entry means "any one of these alternatives".
    standards = [('README', 'README.txt'), self.distribution.script_name]
    for fn in standards:
        if isinstance(fn, tuple):
            alts = fn
            got_it = 0
            # NOTE: the inner loop deliberately reuses (and clobbers)
            # the outer loop variable 'fn'.
            for fn in alts:
                if os.path.exists(fn):
                    got_it = 1
                    self.filelist.append(fn)
                    break
            if (not got_it):
                self.warn(('standard file not found: should have one of ' + string.join(alts, ', ')))
        elif os.path.exists(fn):
            self.filelist.append(fn)
        else:
            self.warn(("standard file '%s' not found" % fn))
    optional = ['test/test*.py', 'setup.cfg']
    for pattern in optional:
        # NOTE(review): relies on Python 2 'filter' returning a list;
        # under Python 3 this would be an always-truthy iterator.
        files = filter(os.path.isfile, glob(pattern))
        if files:
            self.filelist.extend(files)
    build_py = self.get_finalized_command('build_py')
    if self.distribution.has_pure_modules():
        self.filelist.extend(build_py.get_source_files())
        # package_data files, as collected by build_py.
        for (pkg, src_dir, build_dir, filenames) in build_py.data_files:
            for filename in filenames:
                self.filelist.append(os.path.join(src_dir, filename))
    if self.distribution.has_data_files():
        for item in self.distribution.data_files:
            if isinstance(item, str):
                # Plain file name.
                item = convert_path(item)
                if os.path.isfile(item):
                    self.filelist.append(item)
            else:
                # (target_dir, [files...]) pair.
                (dirname, filenames) = item
                for f in filenames:
                    f = convert_path(f)
                    if os.path.isfile(f):
                        self.filelist.append(f)
    if self.distribution.has_ext_modules():
        build_ext = self.get_finalized_command('build_ext')
        self.filelist.extend(build_ext.get_source_files())
    if self.distribution.has_c_libraries():
        build_clib = self.get_finalized_command('build_clib')
        self.filelist.extend(build_clib.get_source_files())
    if self.distribution.has_scripts():
        build_scripts = self.get_finalized_command('build_scripts')
        self.filelist.extend(build_scripts.get_source_files())
|
def read_template(self):
    """Read and parse manifest template file named by self.template
    (usually "MANIFEST.in").

    The parsing and processing is done by 'self.filelist', which
    updates itself accordingly.
    """
    log.info("reading manifest template '%s'", self.template)
    # TextFile handles comment stripping, blank-line skipping and
    # continuation-line joining, so each readline() yields one logical
    # directive.
    template = TextFile(self.template, strip_comments=1, skip_blanks=1, join_lines=1, lstrip_ws=1, rstrip_ws=1, collapse_join=1)
    try:
        while 1:
            line = template.readline()
            if (line is None):
                # End of template.
                break
            try:
                self.filelist.process_template_line(line)
            # Malformed lines are reported with file/line context but
            # do not abort processing of the rest of the template.
            except DistutilsTemplateError as msg:
                self.warn(('%s, line %d: %s' % (template.filename, template.current_line, msg)))
    finally:
        template.close()
|
def prune_file_list(self):
    """Prune off branches that might slip into the file list as created
    by 'read_template()', but really don't belong there:
      * the build tree (typically "build")
      * the release tree itself (only an issue if we ran "sdist"
        previously with --keep-temp, or it aborted)
      * any RCS, CVS, .svn, .hg, .git, .bzr, _darcs directories
    """
    build = self.get_finalized_command('build')
    base_dir = self.distribution.get_fullname()
    self.filelist.exclude_pattern(None, prefix=build.build_base)
    self.filelist.exclude_pattern(None, prefix=base_dir)
    # On Windows, paths may use either separator.
    seps = '/|\\\\' if sys.platform == 'win32' else '/'
    vcs_dirs = ['RCS', 'CVS', '\\.svn', '\\.hg', '\\.git', '\\.bzr', '_darcs']
    vcs_ptrn = '(^|%s)(%s)(%s).*' % (seps, '|'.join(vcs_dirs), seps)
    self.filelist.exclude_pattern(vcs_ptrn, is_regex=1)
|
def write_manifest(self):
    """Write the file list in 'self.filelist' (presumably as filled in
    by 'add_defaults()' and 'read_template()') to the manifest file
    named by 'self.manifest'.
    """
    if os.path.isfile(self.manifest):
        fp = open(self.manifest)
        try:
            first_line = fp.readline()
        finally:
            fp.close()
        # A manifest that lacks our marker line was written by hand --
        # never overwrite it.
        if (first_line != '# file GENERATED by distutils, do NOT edit\n'):
            log.info(("not writing to manually maintained manifest file '%s'" % self.manifest))
            return
    content = self.filelist.files[:]
    content.insert(0, '# file GENERATED by distutils, do NOT edit')
    # self.execute respects dry-run mode.
    self.execute(file_util.write_file, (self.manifest, content), ("writing manifest file '%s'" % self.manifest))
|
def read_manifest(self):
    """Read the manifest file (named by 'self.manifest') and use it to
    fill in 'self.filelist', the list of files to include in the source
    distribution.
    """
    log.info("reading manifest file '%s'", self.manifest)
    manifest = open(self.manifest)
    # try/finally so the file handle is released even if appending to
    # the file list raises; the original leaked it in that case.
    try:
        for line in manifest:
            # Strip a single trailing newline, preserving any other
            # whitespace (matches the original readline-based loop).
            if line.endswith('\n'):
                line = line[:-1]
            self.filelist.append(line)
    finally:
        manifest.close()
|
def make_release_tree(self, base_dir, files):
    """Create the directory tree that will become the source
    distribution archive.

    All directories implied by the filenames in 'files' are created
    under 'base_dir', and then we hard link or copy (if hard linking
    is unavailable) those files into place.  Essentially, this
    duplicates the developer's source tree, but in a directory named
    after the distribution, containing only the files to be
    distributed.
    """
    self.mkpath(base_dir)
    dir_util.create_tree(base_dir, files, dry_run=self.dry_run)
    # Prefer hard links where the OS supports them: cheaper than
    # copying and the tree stays identical to the originals.
    if hasattr(os, 'link'):
        link = 'hard'
        msg = ('making hard links in %s...' % base_dir)
    else:
        link = None
        msg = ('copying files to %s...' % base_dir)
    if (not files):
        log.warn('no files to distribute -- empty manifest?')
    else:
        log.info(msg)
    for file in files:
        if (not os.path.isfile(file)):
            # Directories (and anything else non-regular) are skipped;
            # create_tree above already made the directories.
            log.warn(("'%s' not a regular file -- skipping" % file))
        else:
            dest = os.path.join(base_dir, file)
            self.copy_file(file, dest, link=link)
    self.distribution.metadata.write_pkg_info(base_dir)
|
def make_distribution(self):
    """Create the source distribution(s).

    First, we create the release tree with 'make_release_tree()';
    then, we create all required archive files (according to
    'self.formats') from the release tree.  Finally, we clean up by
    blowing away the release tree (unless 'self.keep_temp' is true).
    The list of archive files created is stored so it can be retrieved
    later by 'get_archive_files()'.
    """
    base_dir = self.distribution.get_fullname()
    base_name = os.path.join(self.dist_dir, base_dir)
    self.make_release_tree(base_dir, self.filelist.files)
    archive_files = []
    # Move plain 'tar' to the end of the format list -- presumably so
    # the uncompressed tarball is produced last; confirm the intent.
    if ('tar' in self.formats):
        self.formats.append(self.formats.pop(self.formats.index('tar')))
    for fmt in self.formats:
        file = self.make_archive(base_name, fmt, base_dir=base_dir, owner=self.owner, group=self.group)
        archive_files.append(file)
        # Record the archive for upload/register commands.
        self.distribution.dist_files.append(('sdist', '', file))
    self.archive_files = archive_files
    if (not self.keep_temp):
        dir_util.remove_tree(base_dir, dry_run=self.dry_run)
|
def get_archive_files(self):
    """Return the list of archive files created when the command was
    run, or None if the command hasn't run yet.
    """
    return self.archive_files
|
def get_outputs(self):
    """Return the list of files that would be installed if this command
    were actually run.

    Not affected by the "dry-run" flag or whether modules have
    actually been built yet.
    """
    pure = self._mutate_outputs(self.distribution.has_pure_modules(),
                                'build_py', 'build_lib', self.install_dir)
    # Compiled .pyc outputs only when byte-compilation is enabled.
    bytecode = self._bytecode_filenames(pure) if self.compile else []
    extensions = self._mutate_outputs(self.distribution.has_ext_modules(),
                                      'build_ext', 'build_lib', self.install_dir)
    return pure + bytecode + extensions
|
def get_inputs(self):
    """Get the list of files that are input to this command, ie. the
    files that get installed as they are named in the build tree.

    The files in this list correspond one-to-one to the output
    filenames returned by 'get_outputs()'.
    """
    sources = []
    if self.distribution.has_pure_modules():
        sources.extend(self.get_finalized_command('build_py').get_outputs())
    if self.distribution.has_ext_modules():
        sources.extend(self.get_finalized_command('build_ext').get_outputs())
    return sources
|
def check_library_list(self, libraries):
    """Ensure that the list of libraries is valid.

    `libraries` is presumably provided as a command option 'libraries'.
    This method checks that it is a list of 2-tuples, where the tuples
    are (library_name, build_info_dict).

    Raise DistutilsSetupError if the structure is invalid anywhere;
    just returns otherwise.
    """
    if not isinstance(libraries, list):
        raise DistutilsSetupError("'libraries' option must be a list of tuples")
    for lib in libraries:
        # BUG FIX: the original condition used 'and', so a tuple of the
        # wrong length (or a 2-element list) passed validation and blew
        # up later on unpacking.  Reject anything that is not a 2-tuple.
        if not isinstance(lib, tuple) or len(lib) != 2:
            raise DistutilsSetupError("each element of 'libraries' must be a 2-tuple")
        (name, build_info) = lib
        if not isinstance(name, str):
            raise DistutilsSetupError(("first element of each tuple in 'libraries' " + 'must be a string (the library name)'))
        if ('/' in name) or ((os.sep != '/') and (os.sep in name)):
            raise DistutilsSetupError((("bad library name '%s': " + 'may not contain directory separators') % lib[0]))
        if not isinstance(build_info, dict):
            raise DistutilsSetupError(("second element of each tuple in 'libraries' " + 'must be a dictionary (build info)'))
|
def set_executables(self, **args):
    """Define the executables (and options for them) that will be run
    to perform the various stages of compilation.

    The exact set of executables that may be specified here depends on
    the compiler class (via the 'executables' class attribute), but
    most will have:
        compiler      the C/C++ compiler
        linker_so     linker used to create shared objects and libraries
        linker_exe    linker used to create binary executables
        archiver      static library creator

    On platforms with a command-line (Unix, DOS/Windows), each of these
    is a string that will be split into executable name and (optional)
    list of arguments, as by 'distutils.util.split_quoted()'.
    """
    for key, value in args.items():
        if key not in self.executables:
            raise ValueError("unknown executable '%s' for class %s" % (key, self.__class__.__name__))
        self.set_executable(key, value)
|
'Ensures that every element of \'definitions\' is a valid macro
definition, ie. either (name,value) 2-tuple or a (name,) tuple. Do
nothing if all definitions are OK, raise TypeError otherwise.'
| def _check_macro_definitions(self, definitions):
| for defn in definitions:
if (not (isinstance(defn, tuple) and ((len(defn) == 1) or ((len(defn) == 2) and (isinstance(defn[1], str) or (defn[1] is None)))) and isinstance(defn[0], str))):
raise TypeError, ((("invalid macro definition '%s': " % defn) + 'must be tuple (string,), (string, string), or ') + '(string, None)')
|
def define_macro(self, name, value=None):
    """Define a preprocessor macro for all compilations driven by this
    compiler object.

    The optional parameter 'value' should be a string; if it is not
    supplied, then the macro will be defined without an explicit value
    and the exact outcome depends on the compiler used.
    """
    # Drop any earlier definition/undefinition of the same name so the
    # new entry takes precedence.
    slot = self._find_macro(name)
    if slot is not None:
        del self.macros[slot]
    self.macros.append((name, value))
|
def undefine_macro(self, name):
    """Undefine a preprocessor macro for all compilations driven by
    this compiler object.

    If the same macro is defined by 'define_macro()' and undefined by
    'undefine_macro()' the last call takes precedence (including
    multiple redefinitions or undefinitions).  Per-compilation
    definitions (ie. in the call to 'compile()') take precedence over
    either.
    """
    # Drop any earlier entry for the same name, then record the
    # undefinition as a 1-tuple.
    slot = self._find_macro(name)
    if slot is not None:
        del self.macros[slot]
    self.macros.append((name,))
|
def add_include_dir(self, dir):
    """Add 'dir' to the list of directories that will be searched for
    header files.

    Directories are searched in the order in which they are supplied
    by successive calls to 'add_include_dir()'.
    """
    self.include_dirs.append(dir)
|
def set_include_dirs(self, dirs):
    """Set the list of directories that will be searched to 'dirs' (a
    list of strings).

    Overrides any preceding calls to 'add_include_dir()'; subsequent
    calls to 'add_include_dir()' add to the list passed here.  Does
    not affect any list of standard include directories that the
    compiler may search by default.
    """
    # Store a copy so later mutation of the caller's list has no effect.
    self.include_dirs = list(dirs)
|
def add_library(self, libname):
    """Add 'libname' to the list of libraries that will be included in
    all links driven by this compiler object.

    'libname' should *not* be the name of a file containing a library,
    but the name of the library itself: the actual filename will be
    inferred by the linker, the compiler, or the compiler class
    (depending on the platform).  Duplicates are allowed; the linker
    will be told to link against the library as many times as it is
    mentioned.
    """
    self.libraries.append(libname)
|
def set_libraries(self, libnames):
    """Set the list of libraries to be included in all links driven by
    this compiler object to 'libnames' (a list of strings).

    This does not affect any standard system libraries that the linker
    may include by default.
    """
    # Store a copy so later mutation of the caller's list has no effect.
    self.libraries = list(libnames)
|
def add_library_dir(self, dir):
    """Add 'dir' to the list of directories that will be searched for
    libraries specified to 'add_library()' and 'set_libraries()'.

    The linker searches directories in the order they are supplied.
    """
    self.library_dirs.append(dir)
|
def set_library_dirs(self, dirs):
    """Set the list of library search directories to 'dirs' (a list of
    strings).

    This does not affect any standard library search path that the
    linker may search by default.
    """
    # Store a copy so later mutation of the caller's list has no effect.
    self.library_dirs = list(dirs)
|
def add_runtime_library_dir(self, dir):
    """Add 'dir' to the list of directories that will be searched for
    shared libraries at runtime.
    """
    self.runtime_library_dirs.append(dir)
|
def set_runtime_library_dirs(self, dirs):
    """Set the list of directories to search for shared libraries at
    runtime to 'dirs' (a list of strings).

    This does not affect any standard search path that the runtime
    linker may search by default.
    """
    # Store a copy so later mutation of the caller's list has no effect.
    self.runtime_library_dirs = list(dirs)
|
def add_link_object(self, object):
    """Add 'object' to the list of object files (or analogues, such as
    explicitly named library files or the output of "resource
    compilers") to be included in every link driven by this compiler
    object.
    """
    self.objects.append(object)
|
def set_link_objects(self, objects):
    """Set the list of object files (or analogues) to be included in
    every link to 'objects'.

    This does not affect any standard object files that the linker may
    include by default (such as system libraries).
    """
    # Store a copy so later mutation of the caller's list has no effect.
    self.objects = list(objects)
|
def _setup_compile(self, outdir, macros, incdirs, sources, depends, extra):
    """Process arguments and decide which source files to compile.

    Returns (macros, objects, extra, pp_opts, build) where 'build'
    maps each object filename to its (source, extension) pair.
    """
    if outdir is None:
        outdir = self.output_dir
    elif not isinstance(outdir, str):
        raise TypeError("'output_dir' must be a string or None")
    if macros is None:
        macros = self.macros
    elif isinstance(macros, list):
        # Per-call macros take precedence over the permanent ones.
        macros = macros + (self.macros or [])
    else:
        raise TypeError("'macros' (if supplied) must be a list of tuples")
    if incdirs is None:
        incdirs = self.include_dirs
    elif isinstance(incdirs, (list, tuple)):
        incdirs = list(incdirs) + (self.include_dirs or [])
    else:
        raise TypeError("'include_dirs' (if supplied) must be a list of strings")
    if extra is None:
        extra = []
    # One object filename per source, in the same order.
    objects = self.object_filenames(sources, strip_dir=0, output_dir=outdir)
    assert len(objects) == len(sources)
    pp_opts = gen_preprocess_options(macros, incdirs)
    build = {}
    for src, obj in zip(sources, objects):
        ext = os.path.splitext(src)[1]
        self.mkpath(os.path.dirname(obj))
        build[obj] = (src, ext)
    return (macros, objects, extra, pp_opts, build)
|
'Typecheck and fix-up some of the arguments to the \'compile()\'
method, and return fixed-up values. Specifically: if \'output_dir\'
is None, replaces it with \'self.output_dir\'; ensures that \'macros\'
is a list, and augments it with \'self.macros\'; ensures that
\'include_dirs\' is a list, and augments it with \'self.include_dirs\'.
Guarantees that the returned values are of the correct type,
i.e. for \'output_dir\' either string or None, and for \'macros\' and
\'include_dirs\' either list or None.'
| def _fix_compile_args(self, output_dir, macros, include_dirs):
| if (output_dir is None):
output_dir = self.output_dir
elif (not isinstance(output_dir, str)):
raise TypeError, "'output_dir' must be a string or None"
if (macros is None):
macros = self.macros
elif isinstance(macros, list):
macros = (macros + (self.macros or []))
else:
raise TypeError, "'macros' (if supplied) must be a list of tuples"
if (include_dirs is None):
include_dirs = self.include_dirs
elif isinstance(include_dirs, (list, tuple)):
include_dirs = (list(include_dirs) + (self.include_dirs or []))
else:
raise TypeError, "'include_dirs' (if supplied) must be a list of strings"
return (output_dir, macros, include_dirs)
|
'Typecheck and fix up some arguments supplied to various methods.
Specifically: ensure that \'objects\' is a list; if output_dir is
None, replace with self.output_dir. Return fixed versions of
\'objects\' and \'output_dir\'.'
| def _fix_object_args(self, objects, output_dir):
| if (not isinstance(objects, (list, tuple))):
raise TypeError, "'objects' must be a list or tuple of strings"
objects = list(objects)
if (output_dir is None):
output_dir = self.output_dir
elif (not isinstance(output_dir, str)):
raise TypeError, "'output_dir' must be a string or None"
return (objects, output_dir)
|
'Typecheck and fix up some of the arguments supplied to the
\'link_*\' methods. Specifically: ensure that all arguments are
lists, and augment them with their permanent versions
(eg. \'self.libraries\' augments \'libraries\'). Return a tuple with
fixed versions of all arguments.'
| def _fix_lib_args(self, libraries, library_dirs, runtime_library_dirs):
| if (libraries is None):
libraries = self.libraries
elif isinstance(libraries, (list, tuple)):
libraries = (list(libraries) + (self.libraries or []))
else:
raise TypeError, "'libraries' (if supplied) must be a list of strings"
if (library_dirs is None):
library_dirs = self.library_dirs
elif isinstance(library_dirs, (list, tuple)):
library_dirs = (list(library_dirs) + (self.library_dirs or []))
else:
raise TypeError, "'library_dirs' (if supplied) must be a list of strings"
if (runtime_library_dirs is None):
runtime_library_dirs = self.runtime_library_dirs
elif isinstance(runtime_library_dirs, (list, tuple)):
runtime_library_dirs = (list(runtime_library_dirs) + (self.runtime_library_dirs or []))
else:
raise TypeError, ("'runtime_library_dirs' (if supplied) " + 'must be a list of strings')
return (libraries, library_dirs, runtime_library_dirs)
|
'Return true if we need to relink the files listed in \'objects\'
to recreate \'output_file\'.'
| def _need_link(self, objects, output_file):
| if self.force:
return 1
else:
if self.dry_run:
newer = newer_group(objects, output_file, missing='newer')
else:
newer = newer_group(objects, output_file)
return newer
|
def detect_language(self, sources):
    """Detect the language of a given file, or list of files.

    Uses language_map (extension -> language) and language_order
    (highest-precedence language first) to do the job.
    """
    if not isinstance(sources, list):
        sources = [sources]
    best_lang = None
    best_rank = len(self.language_order)
    for source in sources:
        ext = os.path.splitext(source)[1]
        try:
            rank = self.language_order.index(self.language_map.get(ext))
        except ValueError:
            # Unknown extension (or unranked language) -- ignore.
            continue
        if rank < best_rank:
            best_rank = rank
            best_lang = self.language_order[rank]
    return best_lang
|
def preprocess(self, source, output_file=None, macros=None, include_dirs=None, extra_preargs=None, extra_postargs=None):
    """Preprocess a single C/C++ source file, named in 'source'.

    Output will be written to file named 'output_file', or stdout if
    'output_file' not supplied.  'macros' is a list of macro
    definitions as for 'compile()', which will augment the macros set
    with 'define_macro()' and 'undefine_macro()'.  'include_dirs' is a
    list of directory names that will be added to the default list.

    Raises PreprocessError on failure.
    """
    # Abstract hook: concrete compiler classes override this; the base
    # implementation intentionally does nothing.
    pass
|
def compile(self, sources, output_dir=None, macros=None, include_dirs=None, debug=0, extra_preargs=None, extra_postargs=None, depends=None):
    """Compile one or more source files.

    'sources' must be a list of filenames, most likely C/C++ files.
    Return a list of object filenames, one per source filename in
    'sources'.  Depending on the implementation, not all source files
    will necessarily be compiled, but all corresponding object
    filenames will be returned.

    If 'output_dir' is given, object files are put under it while
    retaining their original path component (e.g. "foo/bar.c" ->
    "build/foo/bar.o").  'macros' is a list of (name, value) 2-tuples
    (define; value None means no explicit value) and (name,) 1-tuples
    (undefine); later entries take precedence.  'include_dirs' extends
    the default include search path for this compilation only.
    'debug' asks the compiler for debug symbols.  'extra_preargs' and
    'extra_postargs' are implementation-dependent extra command-line
    arguments.  'depends' is a list of filenames all targets depend
    on, for coarse-grained dependency tracking.

    Raises CompileError on failure.
    """
    (macros, objects, extra_postargs, pp_opts, build) = self._setup_compile(output_dir, macros, include_dirs, sources, depends, extra_postargs)
    cc_args = self._get_cc_args(pp_opts, debug, extra_preargs)
    for obj in objects:
        # Objects without a build entry have nothing to do; skip them.
        entry = build.get(obj)
        if entry is None:
            continue
        (src, ext) = entry
        self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts)
    # Return *all* object filenames, not just the ones we just compiled.
    return objects
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.