'Send a command and expect a response beginning with \'2\'.'
def voidcmd(self, cmd):
    self.putcmd(cmd)
    return self.voidresp()
|
'Send a PORT command with the current host and the given port number.'
def sendport(self, host, port):
    hbytes = host.split('.')
    pbytes = [repr(port // 256), repr(port % 256)]
    bytes = hbytes + pbytes
    cmd = 'PORT ' + ','.join(bytes)
    return self.voidcmd(cmd)
|
'Send an EPRT command with the current host and the given port number.'
def sendeprt(self, host, port):
    af = 0
    if self.af == socket.AF_INET:
        af = 1
    if self.af == socket.AF_INET6:
        af = 2
    if af == 0:
        raise error_proto, 'unsupported address family'
    fields = ['', repr(af), host, repr(port), '']
    cmd = 'EPRT ' + '|'.join(fields)
    return self.voidcmd(cmd)
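The PORT and EPRT arguments are just text encodings of the listening address and port. A minimal standalone sketch of that encoding follows; the helper names and addresses are made up for illustration and are not part of ftplib:

# Standalone sketch of the command-string encoding used above.
def format_port_cmd(host, port):
    # PORT h1,h2,h3,h4,p1,p2 -- the port is split into high and low bytes
    hbytes = host.split('.')
    pbytes = [repr(port // 256), repr(port % 256)]
    return 'PORT ' + ','.join(hbytes + pbytes)

def format_eprt_cmd(host, port, af=1):
    # EPRT |af|host|port| -- af is 1 for IPv4, 2 for IPv6
    return 'EPRT ' + '|'.join(['', repr(af), host, repr(port), ''])

print format_port_cmd('192.168.0.10', 50000)   # PORT 192,168,0,10,195,80
print format_eprt_cmd('::1', 50000, af=2)      # EPRT |2|::1|50000|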
|
'Create a new socket and send a PORT command for it.'
def makeport(self):
    err = None
    sock = None
    for res in socket.getaddrinfo(None, 0, self.af, socket.SOCK_STREAM, 0, socket.AI_PASSIVE):
        af, socktype, proto, canonname, sa = res
        try:
            sock = socket.socket(af, socktype, proto)
            sock.bind(sa)
        except socket.error as err:
            if sock:
                sock.close()
            sock = None
            continue
        break
    if sock is None:
        if err is not None:
            raise err
        else:
            raise socket.error('getaddrinfo returns an empty list')
    sock.listen(1)
    port = sock.getsockname()[1]
    host = self.sock.getsockname()[0]
    if self.af == socket.AF_INET:
        resp = self.sendport(host, port)
    else:
        resp = self.sendeprt(host, port)
    if self.timeout is not _GLOBAL_DEFAULT_TIMEOUT:
        sock.settimeout(self.timeout)
    return sock
|
'Initiate a transfer over the data connection.
If the transfer is active, send a port command and the
transfer command, and accept the connection. If the server is
passive, send a pasv command, connect to it, and start the
transfer command. Either way, return the socket for the
connection and the expected size of the transfer. The
expected size may be None if it could not be determined.
Optional `rest\' argument can be a string that is sent as the
argument to a REST command. This is essentially a server
marker used to tell the server to skip over any data up to the
given marker.'
def ntransfercmd(self, cmd, rest=None):
    size = None
    if self.passiveserver:
        host, port = self.makepasv()
        conn = socket.create_connection((host, port), self.timeout)
        try:
            if rest is not None:
                self.sendcmd('REST %s' % rest)
            resp = self.sendcmd(cmd)
            # Some servers send a spurious 2xx reply before the 150;
            # discard it and read the real preliminary reply.
            if resp[0] == '2':
                resp = self.getresp()
            if resp[0] != '1':
                raise error_reply, resp
        except:
            conn.close()
            raise
    else:
        sock = self.makeport()
        try:
            if rest is not None:
                self.sendcmd('REST %s' % rest)
            resp = self.sendcmd(cmd)
            # See the note on spurious 2xx replies above.
            if resp[0] == '2':
                resp = self.getresp()
            if resp[0] != '1':
                raise error_reply, resp
            conn, sockaddr = sock.accept()
            if self.timeout is not _GLOBAL_DEFAULT_TIMEOUT:
                conn.settimeout(self.timeout)
        finally:
            sock.close()
    if resp[:3] == '150':
        size = parse150(resp)
    return (conn, size)
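With the default passive mode, ntransfercmd() can be driven directly when the caller wants the data socket and the expected size up front. A hedged usage sketch; the host and remote path are placeholders:

from ftplib import FTP

ftp = FTP('ftp.example.org')              # placeholder server
ftp.login()                               # anonymous login
ftp.voidcmd('TYPE I')                     # binary mode so sizes are in bytes
conn, size = ftp.ntransfercmd('RETR pub/archive.tar.gz')
received = 0
with open('archive.tar.gz', 'wb') as f:
    while True:
        block = conn.recv(8192)
        if not block:
            break
        f.write(block)
        received += len(block)
conn.close()
ftp.voidresp()                            # consume the final 226 reply
print 'got %d of %s expected bytes' % (received, size)
ftp.quit()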
|
'Like ntransfercmd() but returns only the socket.'
def transfercmd(self, cmd, rest=None):
    return self.ntransfercmd(cmd, rest)[0]
|
'Login, default anonymous.'
def login(self, user='', passwd='', acct=''):
    if not user:
        user = 'anonymous'
    if not passwd:
        passwd = ''
    if not acct:
        acct = ''
    if user == 'anonymous' and passwd in ('', '-'):
        passwd = passwd + 'anonymous@'
    resp = self.sendcmd('USER ' + user)
    if resp[0] == '3':
        resp = self.sendcmd('PASS ' + passwd)
    if resp[0] == '3':
        resp = self.sendcmd('ACCT ' + acct)
    if resp[0] != '2':
        raise error_reply, resp
    return resp
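Typical login calls, anonymous and authenticated; the host and credentials below are placeholders:

from ftplib import FTP

# Anonymous login: USER anonymous / PASS anonymous@
ftp = FTP('ftp.example.org', timeout=30)
print ftp.login()                         # e.g. '230 Login successful.'
ftp.quit()

# Authenticated login on a second connection (placeholder credentials)
ftp2 = FTP('ftp.example.org')
ftp2.login(user='alice', passwd='s3cret')
ftp2.quit()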
|
'Retrieve data in binary mode. A new port is created for you.
Args:
  cmd: A RETR command.
  callback: A single parameter callable to be called on each
    block of data read.
  blocksize: The maximum number of bytes to read from the
    socket at one time. [default: 8192]
  rest: Passed to transfercmd(). [default: None]
Returns:
  The response code.'
def retrbinary(self, cmd, callback, blocksize=8192, rest=None):
    self.voidcmd('TYPE I')
    conn = self.transfercmd(cmd, rest)
    while 1:
        data = conn.recv(blocksize)
        if not data:
            break
        callback(data)
    conn.close()
    return self.voidresp()
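A hedged sketch of a binary download that resumes a partial file through the rest argument; the host and paths are placeholders:

import os
from ftplib import FTP

ftp = FTP('ftp.example.org')
ftp.login()

local = 'archive.tar.gz'
offset = None
if os.path.exists(local) and os.path.getsize(local) > 0:
    offset = os.path.getsize(local)       # resume from the bytes we already have
mode = 'ab' if offset else 'wb'
with open(local, mode) as f:
    ftp.retrbinary('RETR pub/archive.tar.gz', f.write, blocksize=8192, rest=offset)
ftp.quit()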
|
'Retrieve data in line mode. A new port is created for you.
Args:
  cmd: A RETR, LIST, NLST, or MLSD command.
  callback: An optional single parameter callable that is called
    for each line with the trailing CRLF stripped.
    [default: print_line()]
Returns:
  The response code.'
def retrlines(self, cmd, callback=None):
    if callback is None:
        callback = print_line
    resp = self.sendcmd('TYPE A')
    conn = self.transfercmd(cmd)
    fp = conn.makefile('rb')
    while 1:
        line = fp.readline(self.maxline + 1)
        if len(line) > self.maxline:
            raise Error('got more than %d bytes' % self.maxline)
        if self.debugging > 2:
            print '*retr*', repr(line)
        if not line:
            break
        if line[-2:] == CRLF:
            line = line[:-2]
        elif line[-1:] == '\n':
            line = line[:-1]
        callback(line)
    fp.close()
    conn.close()
    return self.voidresp()
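Collecting a directory listing with retrlines(); the callback receives each line with the CRLF already stripped (host is a placeholder):

from ftplib import FTP

ftp = FTP('ftp.example.org')
ftp.login()

lines = []
ftp.retrlines('LIST', lines.append)       # long listing, one string per entry
for line in lines:
    print line

names = ftp.nlst()                        # just the file names
ftp.quit()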
|
'Store a file in binary mode. A new port is created for you.
Args:
  cmd: A STOR command.
  fp: A file-like object with a read(num_bytes) method.
  blocksize: The maximum data size to read from fp and send over
    the connection at once. [default: 8192]
  callback: An optional single parameter callable that is called on
    each block of data after it is sent. [default: None]
  rest: Passed to transfercmd(). [default: None]
Returns:
  The response code.'
def storbinary(self, cmd, fp, blocksize=8192, callback=None, rest=None):
    self.voidcmd('TYPE I')
    conn = self.transfercmd(cmd, rest)
    while 1:
        buf = fp.read(blocksize)
        if not buf:
            break
        conn.sendall(buf)
        if callback:
            callback(buf)
    conn.close()
    return self.voidresp()
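Uploading with a progress callback; each callback call receives the block that was just sent (host, credentials and file name are placeholders):

import os
from ftplib import FTP

ftp = FTP('ftp.example.org')
ftp.login(user='alice', passwd='s3cret')   # placeholder credentials

path = 'report.pdf'
total = os.path.getsize(path)
sent = [0]                                  # mutable cell the callback can update

def progress(block):
    sent[0] += len(block)
    print '\r%d/%d bytes' % (sent[0], total),

with open(path, 'rb') as f:
    ftp.storbinary('STOR report.pdf', f, blocksize=8192, callback=progress)
print
ftp.quit()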
|
'Store a file in line mode. A new port is created for you.
Args:
  cmd: A STOR command.
  fp: A file-like object with a readline() method.
  callback: An optional single parameter callable that is called on
    each line after it is sent. [default: None]
Returns:
  The response code.'
def storlines(self, cmd, fp, callback=None):
    self.voidcmd('TYPE A')
    conn = self.transfercmd(cmd)
    while 1:
        buf = fp.readline(self.maxline + 1)
        if len(buf) > self.maxline:
            raise Error('got more than %d bytes' % self.maxline)
        if not buf:
            break
        if buf[-2:] != CRLF:
            if buf[-1] in CRLF:
                buf = buf[:-1]
            buf = buf + CRLF
        conn.sendall(buf)
        if callback:
            callback(buf)
    conn.close()
    return self.voidresp()
|
'Send new account name.'
def acct(self, password):
    cmd = 'ACCT ' + password
    return self.voidcmd(cmd)
|
'Return a list of files in a given directory (default the current).'
def nlst(self, *args):
    cmd = 'NLST'
    for arg in args:
        cmd = cmd + (' ' + arg)
    files = []
    self.retrlines(cmd, files.append)
    return files
|
'List a directory in long form.
By default list current directory to stdout.
Optional last argument is callback function; all
non-empty arguments before it are concatenated to the
LIST command. (This *should* only be used for a pathname.)'
def dir(self, *args):
    cmd = 'LIST'
    func = None
    if args[-1:] and type(args[-1]) != type(''):
        args, func = args[:-1], args[-1]
    for arg in args:
        if arg:
            cmd = cmd + (' ' + arg)
    self.retrlines(cmd, func)
|
'Rename a file.'
def rename(self, fromname, toname):
    resp = self.sendcmd('RNFR ' + fromname)
    if resp[0] != '3':
        raise error_reply, resp
    return self.voidcmd('RNTO ' + toname)
|
'Delete a file.'
def delete(self, filename):
    resp = self.sendcmd('DELE ' + filename)
    if resp[:3] in ('250', '200'):
        return resp
    else:
        raise error_reply, resp
|
'Change to a directory.'
def cwd(self, dirname):
    if dirname == '..':
        try:
            return self.voidcmd('CDUP')
        except error_perm as msg:
            if msg.args[0][:3] != '500':
                raise
    elif dirname == '':
        dirname = '.'
    cmd = 'CWD ' + dirname
    return self.voidcmd(cmd)
|
'Retrieve the size of a file.'
def size(self, filename):
    resp = self.sendcmd('SIZE ' + filename)
    if resp[:3] == '213':
        s = resp[3:].strip()
        try:
            return int(s)
        except (OverflowError, ValueError):
            return long(s)
|
'Make a directory, return its full pathname.'
def mkd(self, dirname):
    resp = self.sendcmd('MKD ' + dirname)
    return parse257(resp)
|
'Remove a directory.'
def rmd(self, dirname):
    return self.voidcmd('RMD ' + dirname)
|
'Return current working directory.'
def pwd(self):
    resp = self.sendcmd('PWD')
    return parse257(resp)
|
'Quit, and close the connection.'
def quit(self):
    resp = self.voidcmd('QUIT')
    self.close()
    return resp
|
'Close the connection without assuming anything about it.'
def close(self):
    try:
        file = self.file
        self.file = None
        if file is not None:
            file.close()
    finally:
        sock = self.sock
        self.sock = None
        if sock is not None:
            sock.close()
|
'Return a list of hosts mentioned in the .netrc file.'
def get_hosts(self):
    return self.__hosts.keys()
|
'Returns login information for the named host.
The return value is a triple containing userid,
password, and the accounting field.'
def get_account(self, host):
    host = host.lower()
    user = passwd = acct = None
    if host in self.__hosts:
        user, passwd, acct = self.__hosts[host]
    user = user or self.__defuser
    passwd = passwd or self.__defpasswd
    acct = acct or self.__defacct
    return (user, passwd, acct)
|
'Return a list of all defined macro names.'
def get_macros(self):
    return self.__macros.keys()
|
'Return a sequence of lines which define a named macro.'
def get_macro(self, macro):
    return self.__macros[macro]
|
'Add a header line to the MIME message.
The key is the name of the header, where the value obviously provides
the value of the header. The optional argument prefix determines
where the header is inserted; 0 means append at the end, 1 means
insert at the start. The default is to append.'
def addheader(self, key, value, prefix=0):
    lines = value.split('\n')
    while lines and not lines[-1]:
        del lines[-1]
    while lines and not lines[0]:
        del lines[0]
    for i in range(1, len(lines)):
        lines[i] = ' ' + lines[i].strip()
    value = '\n'.join(lines) + '\n'
    line = key + ': ' + value
    if prefix:
        self._headers.insert(0, line)
    else:
        self._headers.append(line)
|
'Writes out and forgets all headers accumulated so far.
This is useful if you don\'t need a body part at all; for example,
for a subpart of type message/rfc822 that\'s (mis)used to store some
header-like information.'
def flushheaders(self):
    self._fp.writelines(self._headers)
    self._headers = []
|
'Returns a file-like object for writing the body of the message.
The content-type is set to the provided ctype, and the optional
parameter, plist, provides additional parameters for the
content-type declaration. The optional argument prefix determines
where the header is inserted; 0 means append at the end, 1 means
insert at the start. The default is to insert at the start.'
def startbody(self, ctype, plist=[], prefix=1):
    for name, value in plist:
        ctype = ctype + ';\n %s="%s"' % (name, value)
    self.addheader('Content-Type', ctype, prefix=prefix)
    self.flushheaders()
    self._fp.write('\n')
    return self._fp
|
'Returns a file-like object for writing the body of the message.
Additionally, this method initializes the multi-part code, where the
subtype parameter provides the multipart subtype, the boundary
parameter may provide a user-defined boundary specification, and the
plist parameter provides optional parameters for the subtype. The
optional argument, prefix, determines where the header is inserted;
0 means append at the end, 1 means insert at the start. The default
is to insert at the start. Subparts should be created using the
nextpart() method.'
def startmultipartbody(self, subtype, boundary=None, plist=[], prefix=1):
    self._boundary = boundary or mimetools.choose_boundary()
    return self.startbody('multipart/' + subtype,
                          [('boundary', self._boundary)] + plist,
                          prefix=prefix)
|
'Returns a new instance of MimeWriter which represents an
individual part in a multipart message.
This may be used to write the part as well as used for creating
recursively complex multipart messages. The message must first be
initialized with the startmultipartbody() method before using the
nextpart() method.'
def nextpart(self):
    self._fp.write('\n--' + self._boundary + '\n')
    return self.__class__(self._fp)
|
'This is used to designate the last part of a multipart message.
It should always be used when writing multipart messages.'
def lastpart(self):
    self._fp.write('\n--' + self._boundary + '--\n')
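A small end-to-end sketch of the flow these methods describe (addheader, startmultipartbody, nextpart, startbody, lastpart), writing a two-part message to an in-memory buffer; MimeWriter is the old Python 2 module the methods above come from, and the header values are arbitrary examples:

from StringIO import StringIO
from MimeWriter import MimeWriter

buf = StringIO()
writer = MimeWriter(buf)
writer.addheader('Subject', 'demo message')
writer.addheader('MIME-Version', '1.0')
# Open the multipart container; the boundary is chosen automatically.
writer.startmultipartbody('mixed')

# First part: plain text.
part = writer.nextpart()
body = part.startbody('text/plain')
body.write('hello from part one\n')

# Second part: something that looks like a named attachment.
part = writer.nextpart()
part.addheader('Content-Transfer-Encoding', '7bit')
body = part.startbody('text/x-readme', [('name', 'README')])
body.write('read me\n')

writer.lastpart()
print buf.getvalue()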
|
'Constructs a Fraction.
Takes a string like \'3/2\' or \'1.5\', another Rational instance, a
numerator/denominator pair, or a float.
Examples
>>> Fraction(10, -8)
Fraction(-5, 4)
>>> Fraction(Fraction(1, 7), 5)
Fraction(1, 35)
>>> Fraction(Fraction(1, 7), Fraction(2, 3))
Fraction(3, 14)
>>> Fraction(\'314\')
Fraction(314, 1)
>>> Fraction(\'-35/4\')
Fraction(-35, 4)
>>> Fraction(\'3.1415\') # conversion from numeric string
Fraction(6283, 2000)
>>> Fraction(\'-47e-2\') # string may include a decimal exponent
Fraction(-47, 100)
>>> Fraction(1.47) # direct construction from float (exact conversion)
Fraction(6620291452234629, 4503599627370496)
>>> Fraction(2.25)
Fraction(9, 4)
>>> Fraction(Decimal(\'1.47\'))
Fraction(147, 100)'
def __new__(cls, numerator=0, denominator=None):
    self = super(Fraction, cls).__new__(cls)

    if denominator is None:
        if isinstance(numerator, Rational):
            self._numerator = numerator.numerator
            self._denominator = numerator.denominator
            return self

        elif isinstance(numerator, float):
            value = Fraction.from_float(numerator)
            self._numerator = value._numerator
            self._denominator = value._denominator
            return self

        elif isinstance(numerator, Decimal):
            value = Fraction.from_decimal(numerator)
            self._numerator = value._numerator
            self._denominator = value._denominator
            return self

        elif isinstance(numerator, basestring):
            m = _RATIONAL_FORMAT.match(numerator)
            if m is None:
                raise ValueError('Invalid literal for Fraction: %r' % numerator)
            numerator = int(m.group('num') or '0')
            denom = m.group('denom')
            if denom:
                denominator = int(denom)
            else:
                denominator = 1
                decimal = m.group('decimal')
                if decimal:
                    scale = 10 ** len(decimal)
                    numerator = numerator * scale + int(decimal)
                    denominator *= scale
                exp = m.group('exp')
                if exp:
                    exp = int(exp)
                    if exp >= 0:
                        numerator *= 10 ** exp
                    else:
                        denominator *= 10 ** -exp
            if m.group('sign') == '-':
                numerator = -numerator

        else:
            raise TypeError('argument should be a string or a Rational instance')

    elif isinstance(numerator, Rational) and isinstance(denominator, Rational):
        numerator, denominator = (
            numerator.numerator * denominator.denominator,
            denominator.numerator * numerator.denominator)
    else:
        raise TypeError('both arguments should be Rational instances')

    if denominator == 0:
        raise ZeroDivisionError('Fraction(%s, 0)' % numerator)
    g = gcd(numerator, denominator)
    self._numerator = numerator // g
    self._denominator = denominator // g
    return self
|
'Converts a finite float to a rational number, exactly.
Beware that Fraction.from_float(0.3) != Fraction(3, 10).'
@classmethod
def from_float(cls, f):
    if isinstance(f, numbers.Integral):
        return cls(f)
    elif not isinstance(f, float):
        raise TypeError('%s.from_float() only takes floats, not %r (%s)' %
                        (cls.__name__, f, type(f).__name__))
    if math.isnan(f) or math.isinf(f):
        raise TypeError('Cannot convert %r to %s.' % (f, cls.__name__))
    return cls(*f.as_integer_ratio())
|
'Converts a finite Decimal instance to a rational number, exactly.'
@classmethod
def from_decimal(cls, dec):
    from decimal import Decimal
    if isinstance(dec, numbers.Integral):
        dec = Decimal(int(dec))
    elif not isinstance(dec, Decimal):
        raise TypeError('%s.from_decimal() only takes Decimals, not %r (%s)' %
                        (cls.__name__, dec, type(dec).__name__))
    if not dec.is_finite():
        raise TypeError('Cannot convert %s to %s.' % (dec, cls.__name__))
    sign, digits, exp = dec.as_tuple()
    digits = int(''.join(map(str, digits)))
    if sign:
        digits = -digits
    if exp >= 0:
        return cls(digits * (10 ** exp))
    else:
        return cls(digits, 10 ** -exp)
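A short demonstration of the exactness caveat: a float carries its binary rounding error into the fraction, while a Decimal (or a numeric string) converts to the expected ratio:

from fractions import Fraction
from decimal import Decimal

# 0.3 has no exact binary representation, so the exact conversion exposes
# the underlying 53-bit value rather than 3/10.
print Fraction.from_float(0.3)                        # 5404319552844595/18014398509481984
print Fraction.from_float(0.3) == Fraction(3, 10)     # False

# A Decimal or a decimal string keeps the value exactly.
print Fraction.from_decimal(Decimal('0.3'))           # 3/10
print Fraction('0.3')                                 # 3/10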
|
'Closest Fraction to self with denominator at most max_denominator.
>>> Fraction(\'3.141592653589793\').limit_denominator(10)
Fraction(22, 7)
>>> Fraction(\'3.141592653589793\').limit_denominator(100)
Fraction(311, 99)
>>> Fraction(4321, 8765).limit_denominator(10000)
Fraction(4321, 8765)'
def limit_denominator(self, max_denominator=1000000):
    if max_denominator < 1:
        raise ValueError('max_denominator should be at least 1')
    if self._denominator <= max_denominator:
        return Fraction(self)

    p0, q0, p1, q1 = 0, 1, 1, 0
    n, d = self._numerator, self._denominator
    while True:
        a = n // d
        q2 = q0 + a * q1
        if q2 > max_denominator:
            break
        p0, q0, p1, q1 = p1, q1, p0 + a * p1, q2
        n, d = d, n - a * d

    k = (max_denominator - q0) // q1
    bound1 = Fraction(p0 + k * p1, q0 + k * q1)
    bound2 = Fraction(p1, q1)
    if abs(bound2 - self) <= abs(bound1 - self):
        return bound2
    else:
        return bound1
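limit_denominator() pairs naturally with from_float() for recovering a readable rational from a float:

from fractions import Fraction
import math

# Exact binary value of 0.1, then the closest small-denominator fraction.
print Fraction.from_float(0.1)                        # 3602879701896397/36028797018963968
print Fraction.from_float(0.1).limit_denominator()    # 1/10

# Rational approximations to pi with bounded denominators.
print Fraction.from_float(math.pi).limit_denominator(100)    # 311/99
print Fraction.from_float(math.pi).limit_denominator(1000)   # 355/113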
|
'repr(self)'
| def __repr__(self):
| return ('Fraction(%s, %s)' % (self._numerator, self._denominator))
|
'str(self)'
| def __str__(self):
| if (self._denominator == 1):
return str(self._numerator)
else:
return ('%s/%s' % (self._numerator, self._denominator))
|
'Generates forward and reverse operators given a purely-rational
operator and a function from the operator module.
Use this like:
__op__, __rop__ = _operator_fallbacks(just_rational_op, operator.op)
In general, we want to implement the arithmetic operations so
that mixed-mode operations either call an implementation whose
author knew about the types of both arguments, or convert both
to the nearest built in type and do the operation there. In
Fraction, that means that we define __add__ and __radd__ as:
def __add__(self, other):
# Both types have numerators/denominator attributes,
# so do the operation directly
if isinstance(other, (int, long, Fraction)):
return Fraction(self.numerator * other.denominator +
other.numerator * self.denominator,
self.denominator * other.denominator)
# float and complex don\'t have those operations, but we
# know about those types, so special case them.
elif isinstance(other, float):
return float(self) + other
elif isinstance(other, complex):
return complex(self) + other
# Let the other type take over.
return NotImplemented
def __radd__(self, other):
# radd handles more types than add because there\'s
# nothing left to fall back to.
if isinstance(other, Rational):
return Fraction(self.numerator * other.denominator +
other.numerator * self.denominator,
self.denominator * other.denominator)
elif isinstance(other, Real):
return float(other) + float(self)
elif isinstance(other, Complex):
return complex(other) + complex(self)
return NotImplemented
There are 5 different cases for a mixed-type addition on
Fraction. I\'ll refer to all of the above code that doesn\'t
refer to Fraction, float, or complex as "boilerplate". \'r\'
will be an instance of Fraction, which is a subtype of
Rational (r : Fraction <: Rational), and b : B <:
Complex. The first three involve \'r + b\':
1. If B <: Fraction, int, float, or complex, we handle
that specially, and all is well.
2. If Fraction falls back to the boilerplate code, and it
were to return a value from __add__, we\'d miss the
possibility that B defines a more intelligent __radd__,
so the boilerplate should return NotImplemented from
__add__. In particular, we don\'t handle Rational
here, even though we could get an exact answer, in case
the other type wants to do something special.
3. If B <: Fraction, Python tries B.__radd__ before
Fraction.__add__. This is ok, because it was
implemented with knowledge of Fraction, so it can
handle those instances before delegating to Real or
Complex.
The next two situations describe \'b + r\'. We assume that b
didn\'t know about Fraction in its implementation, and that it
uses similar boilerplate code:
4. If B <: Rational, then __radd__ converts both to the
builtin rational type (hey look, that\'s us) and
proceeds.
5. Otherwise, __radd__ tries to find the nearest common
base ABC, and fall back to its builtin type. Since this
class doesn\'t subclass a concrete type, there\'s no
implementation to fall back to, so we need to try as
hard as possible to return an actual value, or the user
will get a TypeError.'
def _operator_fallbacks(monomorphic_operator, fallback_operator):
    def forward(a, b):
        if isinstance(b, (int, long, Fraction)):
            return monomorphic_operator(a, b)
        elif isinstance(b, float):
            return fallback_operator(float(a), b)
        elif isinstance(b, complex):
            return fallback_operator(complex(a), b)
        else:
            return NotImplemented
    forward.__name__ = '__' + fallback_operator.__name__ + '__'
    forward.__doc__ = monomorphic_operator.__doc__

    def reverse(b, a):
        if isinstance(a, Rational):
            return monomorphic_operator(a, b)
        elif isinstance(a, numbers.Real):
            return fallback_operator(float(a), float(b))
        elif isinstance(a, numbers.Complex):
            return fallback_operator(complex(a), complex(b))
        else:
            return NotImplemented
    reverse.__name__ = '__r' + fallback_operator.__name__ + '__'
    reverse.__doc__ = monomorphic_operator.__doc__

    return (forward, reverse)
|
'a + b'
| def _add(a, b):
| return Fraction(((a.numerator * b.denominator) + (b.numerator * a.denominator)), (a.denominator * b.denominator))
|
'a - b'
| def _sub(a, b):
| return Fraction(((a.numerator * b.denominator) - (b.numerator * a.denominator)), (a.denominator * b.denominator))
|
'a * b'
| def _mul(a, b):
| return Fraction((a.numerator * b.numerator), (a.denominator * b.denominator))
|
'a / b'
| def _div(a, b):
| return Fraction((a.numerator * b.denominator), (a.denominator * b.numerator))
|
'a // b'
def __floordiv__(a, b):
    div = a / b
    if isinstance(div, Rational):
        return div.numerator // div.denominator
    else:
        return math.floor(div)
|
'a // b'
def __rfloordiv__(b, a):
    div = a / b
    if isinstance(div, Rational):
        return div.numerator // div.denominator
    else:
        return math.floor(div)
|
'a % b'
| def __mod__(a, b):
| div = (a // b)
return (a - (b * div))
|
'a % b'
| def __rmod__(b, a):
| div = (a // b)
return (a - (b * div))
|
'a ** b
If b is not an integer, the result will be a float or complex
since roots are generally irrational. If b is an integer, the
result will be rational.'
def __pow__(a, b):
    if isinstance(b, Rational):
        if b.denominator == 1:
            power = b.numerator
            if power >= 0:
                return Fraction(a._numerator ** power,
                                a._denominator ** power)
            else:
                return Fraction(a._denominator ** -power,
                                a._numerator ** -power)
        else:
            return float(a) ** float(b)
    else:
        return float(a) ** b
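Whether exponentiation stays exact depends on the exponent: integral Rational exponents keep the result a Fraction, anything else falls back to float:

from fractions import Fraction

print Fraction(2, 3) ** 2               # 4/9, still exact
print Fraction(2, 3) ** -2              # 9/4, negative integer exponents invert
print Fraction(9, 4) ** Fraction(1, 2)  # 1.5, non-integral exponent -> float
print 2 ** Fraction(3, 1)               # 8, handled by __rpow__ below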
|
'a ** b'
def __rpow__(b, a):
    if b._denominator == 1 and b._numerator >= 0:
        return a ** b._numerator
    if isinstance(a, Rational):
        return Fraction(a.numerator, a.denominator) ** b
    if b._denominator == 1:
        return a ** b._numerator
    return a ** float(b)
|
'+a: Coerces a subclass instance to Fraction'
| def __pos__(a):
| return Fraction(a._numerator, a._denominator)
|
'-a'
| def __neg__(a):
| return Fraction((- a._numerator), a._denominator)
|
'abs(a)'
| def __abs__(a):
| return Fraction(abs(a._numerator), a._denominator)
|
'trunc(a)'
def __trunc__(a):
    if a._numerator < 0:
        return -(-a._numerator // a._denominator)
    else:
        return a._numerator // a._denominator
|
'hash(self)
Tricky because values that are exactly representable as a
float must have the same hash as that float.'
def __hash__(self):
    if self._denominator == 1:
        return hash(self._numerator)
    if self == float(self):
        return hash(float(self))
    else:
        return hash((self._numerator, self._denominator))
|
'a == b'
def __eq__(a, b):
    if isinstance(b, Rational):
        return (a._numerator == b.numerator and
                a._denominator == b.denominator)
    if isinstance(b, numbers.Complex) and b.imag == 0:
        b = b.real
    if isinstance(b, float):
        if math.isnan(b) or math.isinf(b):
            # Comparisons with an infinity or nan behave the same for
            # any finite value, so compare against zero instead.
            return 0.0 == b
        else:
            return a == a.from_float(b)
    else:
        return NotImplemented
|
'Helper for comparison operators, for internal use only.
Implement comparison between a Rational instance `self`, and
either another Rational instance or a float `other`. If
`other` is not a Rational instance or a float, return
NotImplemented. `op` should be one of the six standard
comparison operators.'
def _richcmp(self, other, op):
    if isinstance(other, Rational):
        return op(self._numerator * other.denominator,
                  self._denominator * other.numerator)
    if isinstance(other, complex):
        raise TypeError('no ordering relation is defined for complex numbers')
    if isinstance(other, float):
        if math.isnan(other) or math.isinf(other):
            return op(0.0, other)
        else:
            return op(self, self.from_float(other))
    else:
        return NotImplemented
|
'a < b'
| def __lt__(a, b):
| return a._richcmp(b, operator.lt)
|
'a > b'
| def __gt__(a, b):
| return a._richcmp(b, operator.gt)
|
'a <= b'
| def __le__(a, b):
| return a._richcmp(b, operator.le)
|
'a >= b'
| def __ge__(a, b):
| return a._richcmp(b, operator.ge)
|
'a != 0'
| def __nonzero__(a):
| return (a._numerator != 0)
|
'Create directories under ~'
def create_home_path(self):
    if not self.user:
        return
    home = convert_path(os.path.expanduser('~'))
    for name, path in self.config_vars.iteritems():
        if path.startswith(home) and not os.path.isdir(path):
            self.debug_print("os.makedirs('%s', 0700)" % path)
            os.makedirs(path, 0700)
|
'Return true if the current distribution has any Python
modules to install.'
| def has_lib(self):
| return (self.distribution.has_pure_modules() or self.distribution.has_ext_modules())
|
'Deprecated API.'
| def check_metadata(self):
| warn('distutils.command.register.check_metadata is deprecated, use the check command instead', PendingDeprecationWarning)
check = self.distribution.get_command_obj('check')
check.ensure_finalized()
check.strict = self.strict
check.restructuredtext = 1
check.run()
|
'Reads the configuration file and sets attributes.'
def _set_config(self):
    config = self._read_pypirc()
    if config != {}:
        self.username = config['username']
        self.password = config['password']
        self.repository = config['repository']
        self.realm = config['realm']
        self.has_config = True
    else:
        if self.repository not in ('pypi', self.DEFAULT_REPOSITORY):
            raise ValueError('%s not found in .pypirc' % self.repository)
        if self.repository == 'pypi':
            self.repository = self.DEFAULT_REPOSITORY
        self.has_config = False
|
'Fetch the list of classifiers from the server.'
| def classifiers(self):
| response = urllib2.urlopen((self.repository + '?:action=list_classifiers'))
log.info(response.read())
|
'Send the metadata to the package index server to be checked.'
| def verify_metadata(self):
| (code, result) = self.post_to_server(self.build_post_data('verify'))
log.info(('Server response (%s): %s' % (code, result)))
|
'Send the metadata to the package index server.
Well, do the following:
1. figure who the user is, and then
2. send the data as a Basic auth\'ed POST.
First we try to read the username/password from $HOME/.pypirc,
which is a ConfigParser-formatted file with a section
[distutils] containing username and password entries (both
in clear text). Eg:
[distutils]
index-servers =
pypi
[pypi]
username: fred
password: sekrit
Otherwise, to figure who the user is, we offer the user three
choices:
1. use existing login,
2. register as a new user, or
3. set the password to a random string and email the user.'
| def send_metadata(self):
| if self.has_config:
choice = '1'
username = self.username
password = self.password
else:
choice = 'x'
username = password = ''
choices = '1 2 3 4'.split()
while (choice not in choices):
self.announce('We need to know who you are, so please choose either:\n 1. use your existing login,\n 2. register as a new user,\n 3. have the server generate a new password for you (and email it to you), or\n 4. quit\nYour selection [default 1]: ', log.INFO)
choice = raw_input()
if (not choice):
choice = '1'
elif (choice not in choices):
print 'Please choose one of the four options!'
if (choice == '1'):
while (not username):
username = raw_input('Username: ')
while (not password):
password = getpass.getpass('Password: ')
auth = urllib2.HTTPPasswordMgr()
host = urlparse.urlparse(self.repository)[1]
auth.add_password(self.realm, host, username, password)
(code, result) = self.post_to_server(self.build_post_data('submit'), auth)
self.announce(('Server response (%s): %s' % (code, result)), log.INFO)
if (code == 200):
if self.has_config:
self.distribution.password = password
else:
self.announce('I can store your PyPI login so future submissions will be faster.', log.INFO)
self.announce(('(the login will be stored in %s)' % self._get_rc_file()), log.INFO)
choice = 'X'
while (choice.lower() not in 'yn'):
choice = raw_input('Save your login (y/N)?')
if (not choice):
choice = 'n'
if (choice.lower() == 'y'):
self._store_pypirc(username, password)
elif (choice == '2'):
data = {':action': 'user'}
data['name'] = data['password'] = data['email'] = ''
data['confirm'] = None
while (not data['name']):
data['name'] = raw_input('Username: ')
while (data['password'] != data['confirm']):
while (not data['password']):
data['password'] = getpass.getpass('Password: ')
while (not data['confirm']):
data['confirm'] = getpass.getpass(' Confirm: ')
if (data['password'] != data['confirm']):
data['password'] = ''
data['confirm'] = None
print "Password and confirm don't match!"
while (not data['email']):
data['email'] = raw_input(' EMail: ')
(code, result) = self.post_to_server(data)
if (code != 200):
log.info(('Server response (%s): %s' % (code, result)))
else:
log.info('You will receive an email shortly.')
log.info('Follow the instructions in it to complete registration.')
elif (choice == '3'):
data = {':action': 'password_reset'}
data['email'] = ''
while (not data['email']):
data['email'] = raw_input('Your email address: ')
(code, result) = self.post_to_server(data)
log.info(('Server response (%s): %s' % (code, result)))
|
'Post a query to the server, and return a string response.'
| def post_to_server(self, data, auth=None):
| if ('name' in data):
self.announce(('Registering %s to %s' % (data['name'], self.repository)), log.INFO)
boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
sep_boundary = ('\n--' + boundary)
end_boundary = (sep_boundary + '--')
chunks = []
for (key, value) in data.items():
if (type(value) not in (type([]), type(()))):
value = [value]
for value in value:
chunks.append(sep_boundary)
chunks.append(('\nContent-Disposition: form-data; name="%s"' % key))
chunks.append('\n\n')
chunks.append(value)
if (value and (value[(-1)] == '\r')):
chunks.append('\n')
chunks.append(end_boundary)
chunks.append('\n')
body = []
for chunk in chunks:
if isinstance(chunk, unicode):
body.append(chunk.encode('utf-8'))
else:
body.append(chunk)
body = ''.join(body)
headers = {'Content-type': ('multipart/form-data; boundary=%s; charset=utf-8' % boundary), 'Content-length': str(len(body))}
req = urllib2.Request(self.repository, body, headers)
opener = urllib2.build_opener(urllib2.HTTPBasicAuthHandler(password_mgr=auth))
data = ''
try:
result = opener.open(req)
except urllib2.HTTPError as e:
if self.show_response:
data = e.fp.read()
result = (e.code, e.msg)
except urllib2.URLError as e:
result = (500, str(e))
else:
if self.show_response:
data = result.read()
result = (200, 'OK')
if self.show_response:
dashes = ('-' * 75)
self.announce(('%s%s%s' % (dashes, data, dashes)))
return result
|
'Generate list of \'(package,src_dir,build_dir,filenames)\' tuples'
| def get_data_files(self):
| data = []
if (not self.packages):
return data
for package in self.packages:
src_dir = self.get_package_dir(package)
build_dir = os.path.join(*([self.build_lib] + package.split('.')))
plen = 0
if src_dir:
plen = (len(src_dir) + 1)
filenames = [file[plen:] for file in self.find_data_files(package, src_dir)]
data.append((package, src_dir, build_dir, filenames))
return data
|
'Return filenames for package\'s data files in \'src_dir\''
| def find_data_files(self, package, src_dir):
| globs = (self.package_data.get('', []) + self.package_data.get(package, []))
files = []
for pattern in globs:
filelist = glob(os.path.join(src_dir, convert_path(pattern)))
files.extend([fn for fn in filelist if ((fn not in files) and os.path.isfile(fn))])
return files
|
'Copy data files into build directory'
| def build_package_data(self):
| for (package, src_dir, build_dir, filenames) in self.data_files:
for filename in filenames:
target = os.path.join(build_dir, filename)
self.mkpath(os.path.dirname(target))
self.copy_file(os.path.join(src_dir, filename), target, preserve_mode=False)
|
'Return the directory, relative to the top of the source
distribution, where package \'package\' should be found
(at least according to the \'package_dir\' option, if any).'
def get_package_dir(self, package):
    path = package.split('.')

    if not self.package_dir:
        if path:
            return os.path.join(*path)
        else:
            return ''
    else:
        tail = []
        while path:
            try:
                pdir = self.package_dir['.'.join(path)]
            except KeyError:
                tail.insert(0, path[-1])
                del path[-1]
            else:
                tail.insert(0, pdir)
                return os.path.join(*tail)
        else:
            # Ran out of path components without a match in package_dir;
            # fall back on the root (nameless) package entry, if any.
            pdir = self.package_dir.get('')
            if pdir is not None:
                tail.insert(0, pdir)
            if tail:
                return os.path.join(*tail)
            else:
                return ''
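Because the while/else fallback above is easy to misread, here is a simplified standalone re-implementation of the same lookup (not the distutils API itself), run against a made-up package_dir mapping:

import os

def resolve_package_dir(package, package_dir):
    # Simplified sketch of the lookup: longest-prefix match in package_dir,
    # falling back to the root ('') entry plus the remaining path components.
    path = package.split('.') if package else []
    if not package_dir:
        return os.path.join(*path) if path else ''
    tail = []
    while path:
        try:
            pdir = package_dir['.'.join(path)]
        except KeyError:
            tail.insert(0, path[-1])
            del path[-1]
        else:
            tail.insert(0, pdir)
            return os.path.join(*tail)
    pdir = package_dir.get('')
    if pdir is not None:
        tail.insert(0, pdir)
    return os.path.join(*tail) if tail else ''

mapping = {'': 'src', 'pkg.vendored': 'third_party/vendored'}
print resolve_package_dir('pkg.sub', mapping)        # src/pkg/sub (on posix)
print resolve_package_dir('pkg.vendored', mapping)   # third_party/vendored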
|
'Finds individually-specified Python modules, ie. those listed by
module name in \'self.py_modules\'. Returns a list of tuples (package,
module_base, filename): \'package\' is a tuple of the path through
package-space to the module; \'module_base\' is the bare (no
packages, no dots) module name, and \'filename\' is the path to the
".py" file (relative to the distribution root) that implements the
module.'
| def find_modules(self):
| packages = {}
modules = []
for module in self.py_modules:
path = module.split('.')
package = '.'.join(path[0:(-1)])
module_base = path[(-1)]
try:
(package_dir, checked) = packages[package]
except KeyError:
package_dir = self.get_package_dir(package)
checked = 0
if (not checked):
init_py = self.check_package(package, package_dir)
packages[package] = (package_dir, 1)
if init_py:
modules.append((package, '__init__', init_py))
module_file = os.path.join(package_dir, (module_base + '.py'))
if (not self.check_module(module, module_file)):
continue
modules.append((package, module_base, module_file))
return modules
|
'Compute the list of all modules that will be built, whether
they are specified one-module-at-a-time (\'self.py_modules\') or
by whole packages (\'self.packages\'). Return a list of tuples
(package, module, module_file), just like \'find_modules()\' and
\'find_package_modules()\' do.'
| def find_all_modules(self):
| modules = []
if self.py_modules:
modules.extend(self.find_modules())
if self.packages:
for package in self.packages:
package_dir = self.get_package_dir(package)
m = self.find_package_modules(package, package_dir)
modules.extend(m)
return modules
|
'Check that \'self.compiler\' really is a CCompiler object;
if not, make it one.'
| def _check_compiler(self):
| from distutils.ccompiler import CCompiler, new_compiler
if (not isinstance(self.compiler, CCompiler)):
self.compiler = new_compiler(compiler=self.compiler, dry_run=self.dry_run, force=1)
customize_compiler(self.compiler)
if self.include_dirs:
self.compiler.set_include_dirs(self.include_dirs)
if self.libraries:
self.compiler.set_libraries(self.libraries)
if self.library_dirs:
self.compiler.set_library_dirs(self.library_dirs)
|
'Construct a source file from \'body\' (a string containing lines
of C/C++ code) and \'headers\' (a list of header files to include)
and run it through the preprocessor. Return true if the
preprocessor succeeded, false if there were any errors.
(\'body\' probably isn\'t of much use, but what the heck.)'
| def try_cpp(self, body=None, headers=None, include_dirs=None, lang='c'):
| from distutils.ccompiler import CompileError
self._check_compiler()
ok = 1
try:
self._preprocess(body, headers, include_dirs, lang)
except CompileError:
ok = 0
self._clean()
return ok
|
'Construct a source file (just like \'try_cpp()\'), run it through
the preprocessor, and return true if any line of the output matches
\'pattern\'. \'pattern\' should either be a compiled regex object or a
string containing a regex. If both \'body\' and \'headers\' are None,
preprocesses an empty file -- which can be useful to determine the
symbols the preprocessor and compiler set by default.'
| def search_cpp(self, pattern, body=None, headers=None, include_dirs=None, lang='c'):
| self._check_compiler()
(src, out) = self._preprocess(body, headers, include_dirs, lang)
if isinstance(pattern, str):
pattern = re.compile(pattern)
file = open(out)
match = 0
while 1:
line = file.readline()
if (line == ''):
break
if pattern.search(line):
match = 1
break
file.close()
self._clean()
return match
|
'Try to compile a source file built from \'body\' and \'headers\'.
Return true on success, false otherwise.'
| def try_compile(self, body, headers=None, include_dirs=None, lang='c'):
| from distutils.ccompiler import CompileError
self._check_compiler()
try:
self._compile(body, headers, include_dirs, lang)
ok = 1
except CompileError:
ok = 0
log.info(((ok and 'success!') or 'failure.'))
self._clean()
return ok
|
'Try to compile and link a source file, built from \'body\' and
\'headers\', to executable form. Return true on success, false
otherwise.'
| def try_link(self, body, headers=None, include_dirs=None, libraries=None, library_dirs=None, lang='c'):
| from distutils.ccompiler import CompileError, LinkError
self._check_compiler()
try:
self._link(body, headers, include_dirs, libraries, library_dirs, lang)
ok = 1
except (CompileError, LinkError):
ok = 0
log.info(((ok and 'success!') or 'failure.'))
self._clean()
return ok
|
'Try to compile, link to an executable, and run a program
built from \'body\' and \'headers\'. Return true on success, false
otherwise.'
| def try_run(self, body, headers=None, include_dirs=None, libraries=None, library_dirs=None, lang='c'):
| from distutils.ccompiler import CompileError, LinkError
self._check_compiler()
try:
(src, obj, exe) = self._link(body, headers, include_dirs, libraries, library_dirs, lang)
self.spawn([exe])
ok = 1
except (CompileError, LinkError, DistutilsExecError):
ok = 0
log.info(((ok and 'success!') or 'failure.'))
self._clean()
return ok
|
'Determine if function \'func\' is available by constructing a
source file that refers to \'func\', and compiles and links it.
If everything succeeds, returns true; otherwise returns false.
The constructed source file starts out by including the header
files listed in \'headers\'. If \'decl\' is true, it then declares
\'func\' (as "int func()"); you probably shouldn\'t supply \'headers\'
and set \'decl\' true in the same call, or you might get errors about
a conflicting declarations for \'func\'. Finally, the constructed
\'main()\' function either references \'func\' or (if \'call\' is true)
calls it. \'libraries\' and \'library_dirs\' are used when
linking.'
| def check_func(self, func, headers=None, include_dirs=None, libraries=None, library_dirs=None, decl=0, call=0):
| self._check_compiler()
body = []
if decl:
body.append(('int %s ();' % func))
body.append('int main () {')
if call:
body.append((' %s();' % func))
else:
body.append((' %s;' % func))
body.append('}')
body = ('\n'.join(body) + '\n')
return self.try_link(body, headers, include_dirs, libraries, library_dirs)
|
'Determine if \'library\' is available to be linked against,
without actually checking that any particular symbols are provided
by it. \'headers\' will be used in constructing the source file to
be compiled, but the only effect of this is to check if all the
header files listed are available. Any libraries listed in
\'other_libraries\' will be included in the link, in case \'library\'
has symbols that depend on other libraries.'
| def check_lib(self, library, library_dirs=None, headers=None, include_dirs=None, other_libraries=[]):
| self._check_compiler()
return self.try_link('int main (void) { }', headers, include_dirs, ([library] + other_libraries), library_dirs)
|
'Determine if the system header file named by \'header_file\'
exists and can be found by the preprocessor; return true if so,
false otherwise.'
| def check_header(self, header, include_dirs=None, library_dirs=None, lang='c'):
| return self.try_cpp(body='/* No body */', headers=[header], include_dirs=include_dirs)
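A hedged sketch of how these probes are typically driven from a setup script by subclassing the distutils config command; the header, library and function being checked (zlib.h, libz, inflateEnd) are arbitrary examples, not requirements of any project:

# Hedged sketch: probe the build environment before building.
from distutils.core import setup
from distutils.command.config import config

class probe(config):
    def run(self):
        if self.check_header('zlib.h'):
            print 'zlib.h found'
        if self.check_lib('z'):
            print 'libz links'
        if self.check_func('inflateEnd', headers=['zlib.h'],
                           libraries=['z'], call=1):
            print 'inflateEnd() is callable'

setup(name='probe-demo', version='0.0',
      cmdclass={'probe': probe})
# run with:  python setup.py probe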
|
'Sets default values for options.'
| def initialize_options(self):
| self.restructuredtext = 0
self.metadata = 1
self.strict = 0
self._warnings = 0
|
'Counts the number of warnings that occur.'
def warn(self, msg):
    self._warnings += 1
    return Command.warn(self, msg)
|
'Runs the command.'
def run(self):
    if self.metadata:
        self.check_metadata()
    if self.restructuredtext:
        if HAS_DOCUTILS:
            self.check_restructuredtext()
        elif self.strict:
            raise DistutilsSetupError('The docutils package is needed.')
    if self.strict and self._warnings > 0:
        raise DistutilsSetupError('Please correct your package.')
|
'Ensures that all required elements of meta-data are supplied:
name, version, URL, and (author and author_email) or
(maintainer and maintainer_email).
Warns if any are missing.'
def check_metadata(self):
    metadata = self.distribution.metadata
    missing = []
    for attr in ('name', 'version', 'url'):
        if not (hasattr(metadata, attr) and getattr(metadata, attr)):
            missing.append(attr)
    if missing:
        self.warn('missing required meta-data: %s' % ', '.join(missing))
    if metadata.author:
        if not metadata.author_email:
            self.warn("missing meta-data: if 'author' supplied, "
                      "'author_email' must be supplied too")
    elif metadata.maintainer:
        if not metadata.maintainer_email:
            self.warn("missing meta-data: if 'maintainer' supplied, "
                      "'maintainer_email' must be supplied too")
    else:
        self.warn("missing meta-data: either (author and author_email) "
                  "or (maintainer and maintainer_email) "
                  "must be supplied")
|
'Checks if the long string fields are reST-compliant.'
| def check_restructuredtext(self):
| data = self.distribution.get_long_description()
if (not isinstance(data, unicode)):
data = data.decode(PKG_INFO_ENCODING)
for warning in self._check_rst_data(data):
line = warning[(-1)].get('line')
if (line is None):
warning = warning[1]
else:
warning = ('%s (line %s)' % (warning[1], line))
self.warn(warning)
|
'Returns warnings when the provided data doesn\'t compile.'
| def _check_rst_data(self, data):
| source_path = StringIO()
parser = Parser()
settings = frontend.OptionParser(components=(Parser,)).get_default_values()
settings.tab_width = 4
settings.pep_references = None
settings.rfc_references = None
reporter = SilentReporter(source_path, settings.report_level, settings.halt_level, stream=settings.warning_stream, debug=settings.debug, encoding=settings.error_encoding, error_handler=settings.error_encoding_error_handler)
document = nodes.document(settings, reporter, source=source_path)
document.note_source(source_path, (-1))
try:
parser.parse(data, document)
except AttributeError as e:
reporter.messages.append(((-1), ('Could not finish the parsing: %s.' % e), '', {}))
return reporter.messages
|
'Generate the text of an RPM spec file and return it as a
list of strings (one per line).'
| def _make_spec_file(self):
| spec_file = [('%define name ' + self.distribution.get_name()), ('%define version ' + self.distribution.get_version().replace('-', '_')), ('%define unmangled_version ' + self.distribution.get_version()), ('%define release ' + self.release.replace('-', '_')), '', ('Summary: ' + self.distribution.get_description())]
spec_file.extend(['Name: %{name}', 'Version: %{version}', 'Release: %{release}'])
if self.use_bzip2:
spec_file.append('Source0: %{name}-%{unmangled_version}.tar.bz2')
else:
spec_file.append('Source0: %{name}-%{unmangled_version}.tar.gz')
spec_file.extend([('License: ' + self.distribution.get_license()), ('Group: ' + self.group), 'BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-buildroot', 'Prefix: %{_prefix}'])
if (not self.force_arch):
if (not self.distribution.has_ext_modules()):
spec_file.append('BuildArch: noarch')
else:
spec_file.append(('BuildArch: %s' % self.force_arch))
for field in ('Vendor', 'Packager', 'Provides', 'Requires', 'Conflicts', 'Obsoletes'):
val = getattr(self, string.lower(field))
if isinstance(val, list):
spec_file.append(('%s: %s' % (field, string.join(val))))
elif (val is not None):
spec_file.append(('%s: %s' % (field, val)))
if (self.distribution.get_url() != 'UNKNOWN'):
spec_file.append(('Url: ' + self.distribution.get_url()))
if self.distribution_name:
spec_file.append(('Distribution: ' + self.distribution_name))
if self.build_requires:
spec_file.append(('BuildRequires: ' + string.join(self.build_requires)))
if self.icon:
spec_file.append(('Icon: ' + os.path.basename(self.icon)))
if self.no_autoreq:
spec_file.append('AutoReq: 0')
spec_file.extend(['', '%description', self.distribution.get_long_description()])
def_setup_call = ('%s %s' % (self.python, os.path.basename(sys.argv[0])))
def_build = ('%s build' % def_setup_call)
if self.use_rpm_opt_flags:
def_build = ('env CFLAGS="$RPM_OPT_FLAGS" ' + def_build)
install_cmd = ('%s install -O1 --root=$RPM_BUILD_ROOT --record=INSTALLED_FILES' % def_setup_call)
script_options = [('prep', 'prep_script', '%setup -n %{name}-%{unmangled_version}'), ('build', 'build_script', def_build), ('install', 'install_script', install_cmd), ('clean', 'clean_script', 'rm -rf $RPM_BUILD_ROOT'), ('verifyscript', 'verify_script', None), ('pre', 'pre_install', None), ('post', 'post_install', None), ('preun', 'pre_uninstall', None), ('postun', 'post_uninstall', None)]
for (rpm_opt, attr, default) in script_options:
val = getattr(self, attr)
if (val or default):
spec_file.extend(['', ('%' + rpm_opt)])
if val:
spec_file.extend(string.split(open(val, 'r').read(), '\n'))
else:
spec_file.append(default)
spec_file.extend(['', '%files -f INSTALLED_FILES', '%defattr(-,root,root)'])
if self.doc_files:
spec_file.append(('%doc ' + string.join(self.doc_files)))
if self.changelog:
spec_file.extend(['', '%changelog'])
spec_file.extend(self.changelog)
return spec_file
|
'Format the changelog correctly and convert it to a list of strings'
def _format_changelog(self, changelog):
    if not changelog:
        return changelog
    new_changelog = []
    for line in string.split(string.strip(changelog), '\n'):
        line = string.strip(line)
        if line[0] == '*':
            new_changelog.extend(['', line])
        elif line[0] == '-':
            new_changelog.append(line)
        else:
            new_changelog.append(' ' + line)
    if not new_changelog[0]:
        del new_changelog[0]
    return new_changelog
|
'Ensure that the list of extensions (presumably provided as a
command option \'extensions\') is valid, i.e. it is a list of
Extension objects. We also support the old-style list of 2-tuples,
where the tuples are (ext_name, build_info), which are converted to
Extension instances here.
Raise DistutilsSetupError if the structure is invalid anywhere;
just returns otherwise.'
| def check_extensions_list(self, extensions):
| if (not isinstance(extensions, list)):
raise DistutilsSetupError, "'ext_modules' option must be a list of Extension instances"
for (i, ext) in enumerate(extensions):
if isinstance(ext, Extension):
continue
if ((not isinstance(ext, tuple)) or (len(ext) != 2)):
raise DistutilsSetupError, "each element of 'ext_modules' option must be an Extension instance or 2-tuple"
(ext_name, build_info) = ext
log.warn(("old-style (ext_name, build_info) tuple found in ext_modules for extension '%s'-- please convert to Extension instance" % ext_name))
if (not (isinstance(ext_name, str) and extension_name_re.match(ext_name))):
raise DistutilsSetupError, "first element of each tuple in 'ext_modules' must be the extension name (a string)"
if (not isinstance(build_info, dict)):
raise DistutilsSetupError, "second element of each tuple in 'ext_modules' must be a dictionary (build info)"
ext = Extension(ext_name, build_info['sources'])
for key in ('include_dirs', 'library_dirs', 'libraries', 'extra_objects', 'extra_compile_args', 'extra_link_args'):
val = build_info.get(key)
if (val is not None):
setattr(ext, key, val)
ext.runtime_library_dirs = build_info.get('rpath')
if ('def_file' in build_info):
log.warn("'def_file' element of build info dict no longer supported")
macros = build_info.get('macros')
if macros:
ext.define_macros = []
ext.undef_macros = []
for macro in macros:
if (not (isinstance(macro, tuple) and (len(macro) in (1, 2)))):
raise DistutilsSetupError, "'macros' element of build info dict must be 1- or 2-tuple"
if (len(macro) == 1):
ext.undef_macros.append(macro[0])
elif (len(macro) == 2):
ext.define_macros.append(macro)
extensions[i] = ext
|
'Walk the list of source files in \'sources\', looking for SWIG
interface (.i) files. Run SWIG on all that are found, and
return a modified \'sources\' list with SWIG source files replaced
by the generated C (or C++) files.'
| def swig_sources(self, sources, extension):
| new_sources = []
swig_sources = []
swig_targets = {}
if self.swig_cpp:
log.warn('--swig-cpp is deprecated - use --swig-opts=-c++')
if (self.swig_cpp or ('-c++' in self.swig_opts) or ('-c++' in extension.swig_opts)):
target_ext = '.cpp'
else:
target_ext = '.c'
for source in sources:
(base, ext) = os.path.splitext(source)
if (ext == '.i'):
new_sources.append(((base + '_wrap') + target_ext))
swig_sources.append(source)
swig_targets[source] = new_sources[(-1)]
else:
new_sources.append(source)
if (not swig_sources):
return new_sources
swig = (self.swig or self.find_swig())
swig_cmd = [swig, '-python']
swig_cmd.extend(self.swig_opts)
if self.swig_cpp:
swig_cmd.append('-c++')
if (not self.swig_opts):
for o in extension.swig_opts:
swig_cmd.append(o)
for source in swig_sources:
target = swig_targets[source]
log.info('swigging %s to %s', source, target)
self.spawn((swig_cmd + ['-o', target, source]))
return new_sources
|
'Return the name of the SWIG executable. On Unix, this is
just "swig" -- it should be in the PATH. Tries a bit harder on
Windows.'
def find_swig(self):
    if os.name == 'posix':
        return 'swig'
    elif os.name == 'nt':
        for vers in ('1.3', '1.2', '1.1'):
            fn = os.path.join('c:\\swig%s' % vers, 'swig.exe')
            if os.path.isfile(fn):
                return fn
        else:
            return 'swig.exe'
    elif os.name == 'os2':
        return 'swig.exe'
    else:
        raise DistutilsPlatformError, ("I don't know how to find (much less run) SWIG on platform '%s'" % os.name)
|
'Returns the path of the filename for a given extension.
The file is located in `build_lib` or directly in the package
(inplace option).'
def get_ext_fullpath(self, ext_name):
    all_dots = string.maketrans('/' + os.sep, '..')
    ext_name = ext_name.translate(all_dots)

    fullname = self.get_ext_fullname(ext_name)
    modpath = fullname.split('.')
    filename = self.get_ext_filename(ext_name)
    filename = os.path.split(filename)[-1]

    if not self.inplace:
        # regular build: build_lib/package/filename
        filename = os.path.join(*(modpath[:-1] + [filename]))
        return os.path.join(self.build_lib, filename)

    # in-place build: locate the package directory via the build_py command
    package = '.'.join(modpath[0:-1])
    build_py = self.get_finalized_command('build_py')
    package_dir = os.path.abspath(build_py.get_package_dir(package))
    return os.path.join(package_dir, filename)
|
'Returns the fullname of a given extension name.
Adds the `package.` prefix'
| def get_ext_fullname(self, ext_name):
| if (self.package is None):
return ext_name
else:
return ((self.package + '.') + ext_name)
|
'Convert the name of an extension (eg. "foo.bar") into the name
of the file from which it will be loaded (eg. "foo/bar.so", or
"foo\bar.pyd").'
| def get_ext_filename(self, ext_name):
| from distutils.sysconfig import get_config_var
ext_path = string.split(ext_name, '.')
if (os.name == 'os2'):
ext_path[(len(ext_path) - 1)] = ext_path[(len(ext_path) - 1)][:8]
so_ext = get_config_var('SO')
if ((os.name == 'nt') and self.debug):
return ((os.path.join(*ext_path) + '_d') + so_ext)
return (os.path.join(*ext_path) + so_ext)
|
'Return the list of symbols that a shared extension has to
export. This either uses \'ext.export_symbols\' or, if it\'s not
provided, "init" + module_name. Only relevant on Windows, where
the .pyd file (DLL) must export the module "init" function.'
| def get_export_symbols(self, ext):
| initfunc_name = ('init' + ext.name.split('.')[(-1)])
if (initfunc_name not in ext.export_symbols):
ext.export_symbols.append(initfunc_name)
return ext.export_symbols
|