desc
stringlengths
3
26.7k
decl
stringlengths
11
7.89k
bodies
stringlengths
8
553k
def keys(self):
    """od.keys() -> list of keys in od."""
    return [key for key in self]
def values(self):
    """od.values() -> list of values in od."""
    return list(map(self.__getitem__, self))
def items(self):
    """od.items() -> list of (key, value) pairs in od."""
    pairs = []
    for key in self:
        pairs.append((key, self[key]))
    return pairs
def iterkeys(self):
    """od.iterkeys() -> an iterator over the keys in od."""
    return self.__iter__()
def itervalues(self):
    """od.itervalues -> an iterator over the values in od."""
    return (self[key] for key in self)
def iteritems(self):
    """od.iteritems -> an iterator over the (key, value) items in od."""
    return ((key, self[key]) for key in self)
def update(*args, **kwds):
    """od.update(E, **F) -> None.  Update od from dict/iterable E and F.

    If E is a dict instance: ``for k in E: od[k] = E[k]``.
    If E has a ``.keys()`` method: ``for k in E.keys(): od[k] = E[k]``.
    Otherwise E is treated as an iterable of ``(key, value)`` items.
    In every case, ``F``'s items are applied afterwards.
    """
    if len(args) > 2:
        raise TypeError('update() takes at most 2 positional arguments (%d given)' % (len(args),))
    if not args:
        raise TypeError('update() takes at least 1 argument (0 given)')
    self = args[0]
    other = args[1] if len(args) == 2 else ()
    if isinstance(other, dict):
        for key in other:
            self[key] = other[key]
    elif hasattr(other, 'keys'):
        for key in other.keys():
            self[key] = other[key]
    else:
        for key, value in other:
            self[key] = value
    for key, value in kwds.items():
        self[key] = value
def pop(self, key, default=__marker):
    """od.pop(k[,d]) -> v, remove specified key and return the
    corresponding value.  If key is not found, d is returned if given,
    otherwise KeyError is raised.
    """
    # __marker is a class-level sentinel that distinguishes "no default
    # supplied" from an explicit default of None.
    if key not in self:
        if default is self.__marker:
            raise KeyError(key)
        return default
    result = self[key]
    del self[key]
    return result
def setdefault(self, key, default=None):
    """od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od."""
    if key not in self:
        self[key] = default
        return default
    return self[key]
def __repr__(self, _repr_running={}):
    """od.__repr__() <==> repr(od)"""
    # NOTE: the mutable default is deliberate -- it is a shared table
    # keyed by (instance id, thread id) that detects recursive repr
    # calls (an od that contains itself) and short-circuits to '...'.
    call_key = (id(self), _get_ident())
    if call_key in _repr_running:
        return '...'
    _repr_running[call_key] = 1
    try:
        if not self:
            return '%s()' % (self.__class__.__name__,)
        return '%s(%r)' % (self.__class__.__name__, self.items())
    finally:
        del _repr_running[call_key]
def __reduce__(self):
    """Return state information for pickling."""
    items = [[key, self[key]] for key in self]
    # Copy instance attributes, minus anything a pristine OrderedDict
    # carries, so only user-added attributes are pickled.
    inst_dict = vars(self).copy()
    for attr in vars(OrderedDict()):
        inst_dict.pop(attr, None)
    if inst_dict:
        return (self.__class__, (items,), inst_dict)
    return (self.__class__, (items,))
def copy(self):
    """od.copy() -> a shallow copy of od."""
    return type(self)(self)
@classmethod
def fromkeys(cls, iterable, value=None):
    """OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
    and values equal to v (which defaults to None).
    """
    result = cls()
    for key in iterable:
        result[key] = value
    return result
def __eq__(self, other):
    """od.__eq__(y) <==> od==y.  Comparison to another OD is
    order-sensitive while comparison to a regular mapping is
    order-insensitive.
    """
    if not isinstance(other, OrderedDict):
        return dict.__eq__(self, other)
    return len(self) == len(other) and self.items() == other.items()
def viewkeys(self):
    """od.viewkeys() -> a set-like object providing a view on od's keys."""
    return KeysView(self)
def viewvalues(self):
    """od.viewvalues() -> an object providing a view on od's values."""
    return ValuesView(self)
def viewitems(self):
    """od.viewitems() -> a set-like object providing a view on od's items."""
    return ItemsView(self)
@classmethod
def from_tuples(cls, fieldname, value):
    """A :class:`~urllib3.fields.RequestField` factory from old-style
    tuple parameters.

    ``value`` may be a plain string, or a filetuple
    ``(filename, data[, MIME type])`` where the MIME type is optional
    and is guessed from the filename when omitted.  Field names and
    filenames must be unicode.
    """
    if isinstance(value, tuple):
        if len(value) == 3:
            filename, data, content_type = value
        else:
            filename, data = value
            content_type = guess_content_type(filename)
    else:
        filename = content_type = None
        data = value
    field = cls(fieldname, data, filename=filename)
    field.make_multipart(content_type=content_type)
    return field
def _render_part(self, name, value):
    """Overridable helper function to format a single header parameter.

    :param name: The name of the parameter, a string expected to be
        ASCII only.
    :param value: The value of the parameter, provided as a unicode
        string.
    """
    return format_header_param(name, value)
def _render_parts(self, header_parts):
    """Helper function to format and quote a single header.

    Useful for single headers that are composed of multiple items, e.g.
    'Content-Disposition' fields.

    :param header_parts: A sequence of (k, v) tuples or a :class:`dict`
        of (k, v) to format as ``k1="v1"; k2="v2"; ...``.
    """
    if isinstance(header_parts, dict):
        iterable = header_parts.items()
    else:
        iterable = header_parts
    rendered = [self._render_part(name, value) for name, value in iterable if value]
    return '; '.join(rendered)
def render_headers(self):
    """Renders the headers for this request field."""
    lines = []
    # These headers are emitted first, in this fixed order; any other
    # headers follow in the mapping's own order.
    priority = ['Content-Disposition', 'Content-Type', 'Content-Location']
    for name in priority:
        value = self.headers.get(name, False)
        if value:
            lines.append('%s: %s' % (name, value))
    for name, value in self.headers.items():
        if name not in priority and value:
            lines.append('%s: %s' % (name, value))
    lines.append('\r\n')
    return '\r\n'.join(lines)
def make_multipart(self, content_disposition=None, content_type=None, content_location=None):
    """Makes this request field into a multipart request field.

    Overwrites the "Content-Disposition", "Content-Type" and
    "Content-Location" headers of the request parameter.

    :param content_type: The 'Content-Type' of the request body.
    :param content_location: The 'Content-Location' of the request body.
    """
    disposition = content_disposition or 'form-data'
    rendered = self._render_parts((('name', self._name), ('filename', self._filename)))
    # '; '.join(['', rendered]) prefixes the rendered parts with '; '.
    self.headers['Content-Disposition'] = disposition + '; '.join(['', rendered])
    self.headers['Content-Type'] = content_type
    self.headers['Content-Location'] = content_location
def pop(self, key, default=__marker):
    """D.pop(k[,d]) -> v, remove specified key and return the
    corresponding value.  If key is not found, d is returned if given,
    otherwise KeyError is raised.
    """
    # __marker is a class-level sentinel distinguishing "no default"
    # from an explicit default of None.
    try:
        value = self[key]
    except KeyError:
        if default is self.__marker:
            raise
        return default
    del self[key]
    return value
def add(self, key, val):
    """Adds a (name, value) pair, doesn't overwrite the value if it
    already exists.

    >>> headers = HTTPHeaderDict(foo='bar')
    >>> headers.add('Foo', 'baz')
    >>> headers['foo']
    'bar, baz'
    """
    key_lower = key.lower()
    new_vals = (key, val)
    # setdefault returns our tuple iff the key was absent.
    vals = self._container.setdefault(key_lower, new_vals)
    if new_vals is vals:
        return
    if isinstance(vals, list):
        # Already a multi-value entry: just append.
        vals.append(val)
    else:
        # Second value for this key: promote the tuple to a list of
        # [original-cased key, value, value, ...].
        self._container[key_lower] = [vals[0], vals[1], val]
def extend(self, *args, **kwargs):
    """Generic import function for any type of header-like object.

    Adapted version of MutableMapping.update in order to insert items
    with self.add instead of self.__setitem__.
    """
    if len(args) > 1:
        raise TypeError('extend() takes at most 1 positional arguments ({0} given)'.format(len(args)))
    other = args[0] if args else ()
    if isinstance(other, HTTPHeaderDict):
        for key, val in other.iteritems():
            self.add(key, val)
    elif isinstance(other, Mapping):
        for key in other:
            self.add(key, other[key])
    elif hasattr(other, 'keys'):
        for key in other.keys():
            self.add(key, other[key])
    else:
        for key, value in other:
            self.add(key, value)
    for key, value in kwargs.items():
        self.add(key, value)
def getlist(self, key):
    """Returns a list of all the values for the named field.

    Returns an empty list if the key doesn't exist.
    """
    # _container values are either a (key, value) tuple or a
    # [key, value, value, ...] list, never None.
    vals = self._container.get(key.lower())
    if vals is None:
        return []
    if isinstance(vals, tuple):
        return [vals[1]]
    return vals[1:]
def iteritems(self):
    """Iterate over all header lines, including duplicate ones."""
    for key in self:
        vals = self._container[key.lower()]
        # vals[0] holds the original-cased key; the rest are values.
        for val in vals[1:]:
            yield vals[0], val
def itermerged(self):
    """Iterate over all headers, merging duplicate ones together."""
    for key in self:
        val = self._container[key.lower()]
        yield val[0], ', '.join(val[1:])
@classmethod
def from_httplib(cls, message):
    """Read headers from a Python 2 httplib message object.

    :param message: httplib message whose ``headers`` attribute is a
        list of raw ``"Name: value"`` header lines.
    :return: a ``cls`` built from a list of (name, value) tuples.
    """
    headers = []
    for line in message.headers:
        # A line starting with whitespace continues the previous header
        # (RFC 7230 obs-fold); fold it into the previous value.
        # BUG FIX: the tab prefix had been corrupted to the literal
        # string ' DCTB ' (a text-mangling artifact); restore '\t' so
        # tab-folded continuation lines are recognized again.
        if line.startswith((' ', '\t')):
            key, value = headers[-1]
            headers[-1] = (key, value + '\r\n' + line.rstrip())
            continue
        key, value = line.split(':', 1)
        headers.append((key, value.strip()))
    return cls(headers)
def _new_conn(self):
    """Establish a socket connection and set nodelay settings on it.

    :return: New socket connection.
    """
    extra_kw = {}
    if self.source_address:
        extra_kw['source_address'] = self.source_address
    if self.socket_options:
        extra_kw['socket_options'] = self.socket_options
    try:
        conn = connection.create_connection(
            (self.host, self.port), self.timeout, **extra_kw)
    except SocketTimeout:
        raise ConnectTimeoutError(
            self, 'Connection to %s timed out. (connect timeout=%s)' % (self.host, self.timeout))
    except SocketError as e:
        raise NewConnectionError(
            self, 'Failed to establish a new connection: %s' % e)
    return conn
@classmethod
def _validate_timeout(cls, value, name):
    """Check that a timeout attribute is valid.

    :param value: The timeout value to validate.
    :param name: The name of the timeout attribute, used in error
        messages.
    :return: The validated and casted version of the given value.
    :raises ValueError: If the type is not an integer or a float, or if
        it is a numeric value less than zero.
    """
    if value is _Default:
        return cls.DEFAULT_TIMEOUT
    if value is None or value is cls.DEFAULT_TIMEOUT:
        return value
    try:
        float(value)
    except (TypeError, ValueError):
        raise ValueError('Timeout value %s was %s, but it must be an int or float.' % (name, value))
    try:
        if value < 0:
            raise ValueError('Attempted to set %s timeout to %s, but the timeout cannot be set to a value less than 0.' % (name, value))
    except TypeError:
        # value supports float() but not comparison (e.g. complex-ish).
        raise ValueError('Timeout value %s was %s, but it must be an int or float.' % (name, value))
    return value
@classmethod
def from_float(cls, timeout):
    """Create a new Timeout from a legacy timeout value.

    httplib sets the same timeout on connect() and recv(); this builds a
    :class:`Timeout` with both individual timeouts set to ``timeout``.

    :param timeout: The legacy timeout value (int, float, sentinel
        default object, or None).
    :return: Timeout object.
    :rtype: :class:`Timeout`
    """
    return Timeout(read=timeout, connect=timeout)
def clone(self):
    """Create a copy of the timeout object.

    Timeout properties are stored per-pool but each request needs a
    fresh Timeout object so each one has its own start/stop state.

    :return: a copy of the timeout object.
    :rtype: :class:`Timeout`
    """
    return Timeout(connect=self._connect, read=self._read, total=self.total)
def start_connect(self):
    """Start the timeout clock, used during a connect() attempt.

    :raises urllib3.exceptions.TimeoutStateError: if you attempt to
        start a timer that has been started already.
    """
    if self._start_connect is not None:
        raise TimeoutStateError('Timeout timer has already been started.')
    self._start_connect = current_time()
    return self._start_connect
def get_connect_duration(self):
    """Gets the time elapsed since the call to :meth:`start_connect`.

    :return: Elapsed time.
    :rtype: float
    :raises urllib3.exceptions.TimeoutStateError: if you attempt to get
        duration for a timer that hasn't been started.
    """
    if self._start_connect is None:
        raise TimeoutStateError("Can't get connect duration for timer that has not started.")
    return current_time() - self._start_connect
@property
def connect_timeout(self):
    """Get the value to use when setting a connection timeout.

    This will be a positive float or integer, the value None (never
    timeout), or the default system timeout.

    :return: Connect timeout.
    :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
    """
    if self.total is None:
        return self._connect
    if self._connect is None or self._connect is self.DEFAULT_TIMEOUT:
        return self.total
    return min(self._connect, self.total)
@property
def read_timeout(self):
    """Get the value for the read timeout.

    If ``total`` is set, the read timeout depends on how much time the
    connect phase already consumed.  If the connection time has not
    been established, a
    :exc:`~urllib3.exceptions.TimeoutStateError` will be raised (by
    :meth:`get_connect_duration`).

    :return: Value to use for the read timeout.
    :rtype: int, float, :attr:`Timeout.DEFAULT_TIMEOUT` or None
    """
    total_set = self.total is not None and self.total is not self.DEFAULT_TIMEOUT
    read_set = self._read is not None and self._read is not self.DEFAULT_TIMEOUT
    if total_set and read_set:
        if self._start_connect is None:
            # Connect hasn't started; the plain read timeout applies.
            return self._read
        return max(0, min(self.total - self.get_connect_duration(), self._read))
    if total_set:
        return max(0, self.total - self.get_connect_duration())
    return self._read
@classmethod
def from_int(cls, retries, redirect=True, default=None):
    """Backwards-compatibility for the old retries format."""
    if retries is None:
        retries = default if default is not None else cls.DEFAULT
    if isinstance(retries, Retry):
        return retries
    # redirect=True means "no explicit redirect cap" in the new API.
    redirect = bool(redirect) and None
    new_retries = cls(retries, redirect=redirect)
    log.debug('Converted retries value: %r -> %r' % (retries, new_retries))
    return new_retries
def get_backoff_time(self):
    """Formula for computing the current backoff.

    :rtype: float
    """
    if self._observed_errors <= 1:
        return 0
    # Exponential backoff: factor * 2**(errors - 1), capped at
    # BACKOFF_MAX.
    backoff_value = self.backoff_factor * (2 ** (self._observed_errors - 1))
    return min(self.BACKOFF_MAX, backoff_value)
def sleep(self):
    """Sleep between retry attempts using an exponential backoff.

    By default, the backoff factor is 0 and this method will return
    immediately.
    """
    backoff = self.get_backoff_time()
    if backoff <= 0:
        return
    time.sleep(backoff)
def _is_connection_error(self, err):
    """Errors when we're fairly sure that the server did not receive
    the request, so it should be safe to retry.
    """
    return isinstance(err, ConnectTimeoutError)
def _is_read_error(self, err):
    """Errors that occur after the request has been started, so we
    should assume that the server began processing it.
    """
    return isinstance(err, (ReadTimeoutError, ProtocolError))
def is_forced_retry(self, method, status_code):
    """Is this method/status code retryable? (Based on method/codes
    whitelists.)

    :param method: HTTP method of the request.
    :param status_code: Integer status code of the response.
    :return: bool.

    BUG FIX: the original returned the raw value of
    ``self.status_forcelist and (status_code in self.status_forcelist)``,
    which leaks ``None`` (or the forcelist object itself) instead of a
    boolean.  Callers only use the result in boolean context, so
    normalizing with ``bool()`` is backward-compatible.
    """
    if self.method_whitelist and method.upper() not in self.method_whitelist:
        return False
    return bool(self.status_forcelist and status_code in self.status_forcelist)
def is_exhausted(self):
    """Are we out of retries?"""
    # Falsy counts (None, 0, False) are not being tracked; note that
    # this deliberately drops zeros just like filter(None, ...) did.
    active_counts = [count for count in (self.total, self.connect, self.read, self.redirect) if count]
    if not active_counts:
        return False
    return min(active_counts) < 0
def increment(self, method=None, url=None, response=None, error=None, _pool=None, _stacktrace=None):
    """Return a new Retry object with incremented retry counters.

    :param response: A response object, or None, if the server did not
        return a response.
    :type response: :class:`~urllib3.response.HTTPResponse`
    :param Exception error: An error encountered during the request, or
        None if the response was received successfully.
    :return: A new ``Retry`` object.
    """
    if self.total is False and error:
        # Retries are disabled entirely: surface the original error.
        raise six.reraise(type(error), error, _stacktrace)

    total = self.total
    if total is not None:
        total -= 1
    _observed_errors = self._observed_errors
    connect = self.connect
    read = self.read
    redirect = self.redirect
    cause = 'unknown'

    if error and self._is_connection_error(error):
        # Connect retry?
        if connect is False:
            raise six.reraise(type(error), error, _stacktrace)
        elif connect is not None:
            connect -= 1
            _observed_errors += 1
    elif error and self._is_read_error(error):
        # Read retry?
        if read is False:
            raise six.reraise(type(error), error, _stacktrace)
        elif read is not None:
            read -= 1
            _observed_errors += 1
    elif response and response.get_redirect_location():
        # Redirect retry?
        if redirect is not None:
            redirect -= 1
        cause = 'too many redirects'
    else:
        # Incrementing because of a server error such as a 500 in the
        # status_forcelist and a method in the whitelist.
        _observed_errors += 1
        cause = ResponseError.GENERIC_ERROR
        if response and response.status:
            cause = ResponseError.SPECIFIC_ERROR.format(status_code=response.status)

    new_retry = self.new(total=total, connect=connect, read=read, redirect=redirect, _observed_errors=_observed_errors)
    if new_retry.is_exhausted():
        raise MaxRetryError(_pool, url, error or ResponseError(cause))
    log.debug("Incremented Retry for (url='%s'): %r" % (url, new_retry))
    return new_retry
@property
def hostname(self):
    """For backwards-compatibility with urlparse. We're nice like that."""
    return self.host
@property
def request_uri(self):
    """Absolute path including the query string."""
    uri = self.path or '/'
    if self.query is not None:
        uri = '%s?%s' % (uri, self.query)
    return uri
@property
def netloc(self):
    """Network location including host and port."""
    if not self.port:
        return self.host
    return '%s:%d' % (self.host, self.port)
'Convert self into a url This function should more or less round-trip with :func:`.parse_url`. The returned url may not be exactly the same as the url inputted to :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls with a blank port will have : removed). Example: :: >>> U = parse_url(\'http://google.com/mail/\') >>> U.url \'http://google.com/mail/\' >>> Url(\'http\', \'username:password\', \'host.com\', 80, ... \'/path\', \'query\', \'fragment\').url \'http://username:[email protected]:80/path?query#fragment\''
@property def url(self):
(scheme, auth, host, port, path, query, fragment) = self url = '' if (scheme is not None): url += (scheme + '://') if (auth is not None): url += (auth + '@') if (host is not None): url += host if (port is not None): url += (':' + str(port)) if (path is not None): url += path if (query is not None): url += ('?' + query) if (fragment is not None): url += ('#' + fragment) return url
def close(self):
    """Close all pooled connections and disable the pool.

    This base-class stub is a no-op; subclasses override it with a real
    implementation.

    BUG FIX: the method was declared as ``def close():`` without
    ``self``, so any instance call ``pool.close()`` raised
    ``TypeError: close() takes 0 positional arguments but 1 was given``.
    """
    pass
def _new_conn(self):
    """Return a fresh :class:`HTTPConnection`."""
    self.num_connections += 1
    log.info('Starting new HTTP connection (%d): %s' % (self.num_connections, self.host))
    return self.ConnectionCls(
        host=self.host,
        port=self.port,
        timeout=self.timeout.connect_timeout,
        strict=self.strict,
        **self.conn_kw)
def _get_conn(self, timeout=None):
    """Get a connection.  Will return a pooled connection if one is
    available.

    If no connections are available and :prop:`.block` is ``False``,
    then a fresh connection is returned.

    :param timeout: Seconds to wait before giving up and raising
        :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty
        and :prop:`.block` is ``True``.
    """
    conn = None
    try:
        conn = self.pool.get(block=self.block, timeout=timeout)
    except AttributeError:
        # self.pool is None after close().
        raise ClosedPoolError(self, 'Pool is closed.')
    except Empty:
        if self.block:
            raise EmptyPoolError(self, 'Pool reached maximum size and no more connections are allowed.')
        # Otherwise fall through and open a fresh connection below.
    if conn and is_connection_dropped(conn):
        log.info('Resetting dropped connection: %s' % self.host)
        conn.close()
        if getattr(conn, 'auto_open', 1) == 0:
            # This is a proxied connection that has been mutated by
            # httplib._tunnel() and cannot be reused (py2.7).
            conn = None
    return conn or self._new_conn()
def _put_conn(self, conn):
    """Put a connection back into the pool.

    :param conn: Connection object for the current host and port as
        returned by :meth:`._new_conn` or :meth:`._get_conn`.

    If the pool is already full, the connection is closed and discarded
    because we exceeded maxsize.  If the pool is closed, the connection
    is likewise closed and discarded.
    """
    try:
        self.pool.put(conn, block=False)
        return
    except AttributeError:
        # Pool is closed (self.pool is None); discard the connection.
        pass
    except Full:
        log.warning('Connection pool is full, discarding connection: %s' % self.host)
    if conn:
        conn.close()
def _validate_conn(self, conn):
    """Called right before a request is made, after the socket is
    created.  Base implementation is a no-op hook for subclasses.
    """
    pass
def _get_timeout(self, timeout):
    """Helper that always returns a :class:`urllib3.util.Timeout`."""
    if timeout is _Default:
        return self.timeout.clone()
    if isinstance(timeout, Timeout):
        return timeout.clone()
    # A user-supplied int/float/None: apply it to both connect and read.
    return Timeout.from_float(timeout)
def _raise_timeout(self, err, url, timeout_value):
    """Is the error actually a timeout? Will raise a ReadTimeout or pass."""
    message = 'Read timed out. (read timeout=%s)' % timeout_value
    if isinstance(err, SocketTimeout):
        raise ReadTimeoutError(self, url, message)
    # Some platforms surface timeouts as blocking-errno socket errors.
    if hasattr(err, 'errno') and err.errno in _blocking_errnos:
        raise ReadTimeoutError(self, url, message)
    # Python 2.6 and catch-all: inspect the error text.
    if 'timed out' in str(err) or 'did not complete (read)' in str(err):
        raise ReadTimeoutError(self, url, message)
def _make_request(self, conn, method, url, timeout=_Default, **httplib_request_kw):
    """Perform a request on a given urllib connection object taken from
    our pool.

    :param conn: a connection from one of our connection pools.
    :param timeout: Socket timeout in seconds for the request.  This can
        be a float or integer (same value for connect and read) or an
        instance of :class:`urllib3.util.Timeout` for fine-grained
        control.
    """
    self.num_requests += 1
    timeout_obj = self._get_timeout(timeout)
    timeout_obj.start_connect()
    conn.timeout = timeout_obj.connect_timeout

    # Trigger any extra validation we need to do (e.g. TLS handshake).
    try:
        self._validate_conn(conn)
    except (SocketTimeout, BaseSSLError) as e:
        # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
        self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
        raise

    conn.request(method, url, **httplib_request_kw)

    # Reset the timeout for the recv() on the socket.
    read_timeout = timeout_obj.read_timeout
    if getattr(conn, 'sock', None):
        # In Python 3 socket.py will catch EAGAIN and return None when
        # you try and read into the file pointer created by
        # http.client, which instead raises a BadStatusLine exception.
        # Instead of catching the exception and assuming all
        # BadStatusLine exceptions are read timeouts, check for a zero
        # timeout before making the request.
        if read_timeout == 0:
            raise ReadTimeoutError(self, url, 'Read timed out. (read timeout=%s)' % read_timeout)
        if read_timeout is Timeout.DEFAULT_TIMEOUT:
            conn.sock.settimeout(socket.getdefaulttimeout())
        else:
            conn.sock.settimeout(read_timeout)

    # Receive the response from the server.
    try:
        try:
            # Python 2.7+, use buffering of HTTP responses.
            httplib_response = conn.getresponse(buffering=True)
        except TypeError:
            # Python 2.6 and older do not support buffering.
            httplib_response = conn.getresponse()
    except (SocketTimeout, BaseSSLError, SocketError) as e:
        self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
        raise

    http_version = getattr(conn, '_http_vsn_str', 'HTTP/?')
    log.debug('"%s %s %s" %s %s' % (method, url, http_version, httplib_response.status, httplib_response.length))

    try:
        assert_header_parsing(httplib_response.msg)
    except HeaderParsingError as hpe:
        log.warning('Failed to parse headers (url=%s): %s', self._absolute_url(url), hpe, exc_info=True)

    return httplib_response
def close(self):
    """Close all pooled connections and disable the pool."""
    # Disable access to the pool first so no new connections are
    # checked out, then drain and close whatever remains.
    old_pool, self.pool = self.pool, None
    try:
        while True:
            conn = old_pool.get(block=False)
            if conn:
                conn.close()
    except Empty:
        pass  # Pool drained.
def is_same_host(self, url):
    """Check if the given ``url`` is a member of the same host as this
    connection pool.
    """
    if url.startswith('/'):
        # Relative URLs are always on the same host.
        return True
    scheme, host, port = get_host(url)
    # Use explicit default port for comparison when none is given.
    if self.port and not port:
        port = port_by_scheme.get(scheme)
    elif not self.port and port == port_by_scheme.get(scheme):
        port = None
    return (scheme, host, port) == (self.scheme, self.host, self.port)
def urlopen(self, method, url, body=None, headers=None, retries=None, redirect=True, assert_same_host=True, timeout=_Default, pool_timeout=None, release_conn=None, **response_kw):
    """Get a connection from the pool and perform an HTTP request.

    This is the lowest level call for making a request, so you'll need
    to specify all the raw details; more commonly use a convenience
    method from :class:`.RequestMethods` such as :meth:`request`.

    .. note:: ``release_conn`` only behaves as expected with
       ``preload_content=False``.

    :param method: HTTP request method (GET, POST, PUT, etc.).
    :param body: Data to send in the request body.
    :param headers: Custom headers; if None, pool headers are used, and
        if provided they completely replace pool-specific headers.
    :param retries: :class:`~urllib3.util.retry.Retry` object, int,
        ``None`` (retry forever) or ``False`` (disable retries and
        return redirect responses instead of raising MaxRetryError).
    :param redirect: If True, automatically handle redirects (301, 302,
        303, 307, 308).  Each redirect counts as a retry.
    :param assert_same_host: If True, raise HostChangedError when the
        url is not on this pool's host; set False for proxy use.
    :param timeout: Per-request timeout override (float or
        :class:`urllib3.util.Timeout`).
    :param pool_timeout: Seconds to block waiting for a pooled
        connection when ``block=True``; raises EmptyPoolError on expiry.
    :param release_conn: If False, the connection is not returned to
        the pool until the response is fully read; defaults to
        ``response_kw.get('preload_content', True)``.
    :param \\**response_kw: Passed to
        :meth:`urllib3.response.HTTPResponse.from_httplib`.
    """
    if headers is None:
        headers = self.headers
    if not isinstance(retries, Retry):
        retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
    if release_conn is None:
        release_conn = response_kw.get('preload_content', True)

    # Check host.
    if assert_same_host and not self.is_same_host(url):
        raise HostChangedError(self, url, retries)

    conn = None
    if self.scheme == 'http':
        # Plain-HTTP proxying: merge proxy headers into the request.
        headers = headers.copy()
        headers.update(self.proxy_headers)

    # Must keep the exception bound outside the try block for py3.
    err = None
    try:
        # Request a connection from the queue.
        timeout_obj = self._get_timeout(timeout)
        conn = self._get_conn(timeout=pool_timeout)
        conn.timeout = timeout_obj.connect_timeout

        is_new_proxy_conn = self.proxy is not None and not getattr(conn, 'sock', None)
        if is_new_proxy_conn:
            self._prepare_proxy(conn)

        # Make the request on the httplib connection object.
        httplib_response = self._make_request(conn, method, url, timeout=timeout_obj, body=body, headers=headers)

        # If we're going to release the connection in ``finally:``, the
        # response doesn't need to know about it.
        response_conn = not release_conn and conn

        # Import httplib's response into our own wrapper object.
        response = HTTPResponse.from_httplib(httplib_response, pool=self, connection=response_conn, **response_kw)
    except Empty:
        # Timed out by the pool queue.
        raise EmptyPoolError(self, 'No pool connections are available.')
    except (BaseSSLError, CertificateError) as e:
        # Close the connection; the SSL state is indeterminate.
        conn = conn and conn.close()
        release_conn = True
        raise SSLError(e)
    except SSLError:
        conn = conn and conn.close()
        release_conn = True
        raise
    except (TimeoutError, HTTPException, SocketError, ProtocolError) as e:
        conn = conn and conn.close()
        release_conn = True
        if isinstance(e, (SocketError, NewConnectionError)) and self.proxy:
            e = ProxyError('Cannot connect to proxy.', e)
        elif isinstance(e, (SocketError, HTTPException)):
            e = ProtocolError('Connection aborted.', e)
        retries = retries.increment(method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2])
        retries.sleep()
        # Keep track of the error for the retry warning.
        err = e
    finally:
        if release_conn:
            # Put the connection back to be reused. If the connection is
            # expired then it will be None, which will get replaced with
            # a fresh connection during _get_conn.
            self._put_conn(conn)

    if not conn:
        # Connection was swallowed by an error above; retry.
        log.warning("Retrying (%r) after connection broken by '%r': %s" % (retries, err, url))
        return self.urlopen(method, url, body, headers, retries, redirect, assert_same_host, timeout=timeout, pool_timeout=pool_timeout, release_conn=release_conn, **response_kw)

    # Handle redirect?
    redirect_location = redirect and response.get_redirect_location()
    if redirect_location:
        if response.status == 303:
            method = 'GET'
        try:
            retries = retries.increment(method, url, response=response, _pool=self)
        except MaxRetryError:
            if retries.raise_on_redirect:
                response.release_conn()
                raise
            return response
        log.info('Redirecting %s -> %s' % (url, redirect_location))
        return self.urlopen(method, redirect_location, body, headers, retries=retries, redirect=redirect, assert_same_host=assert_same_host, timeout=timeout, pool_timeout=pool_timeout, release_conn=release_conn, **response_kw)

    # Check if we should retry the HTTP response.
    if retries.is_forced_retry(method, status_code=response.status):
        retries = retries.increment(method, url, response=response, _pool=self)
        retries.sleep()
        log.info('Forced retry: %s' % url)
        return self.urlopen(method, url, body, headers, retries=retries, redirect=redirect, assert_same_host=assert_same_host, timeout=timeout, pool_timeout=pool_timeout, release_conn=release_conn, **response_kw)

    return response
def _prepare_conn(self, conn):
    """Prepare the ``connection`` for
    :meth:`urllib3.util.ssl_wrap_socket` and establish the tunnel if
    proxy is used.
    """
    if isinstance(conn, VerifiedHTTPSConnection):
        conn.set_cert(
            key_file=self.key_file,
            cert_file=self.cert_file,
            cert_reqs=self.cert_reqs,
            ca_certs=self.ca_certs,
            ca_cert_dir=self.ca_cert_dir,
            assert_hostname=self.assert_hostname,
            assert_fingerprint=self.assert_fingerprint)
        conn.ssl_version = self.ssl_version
    return conn
def _prepare_proxy(self, conn):
    """Establish tunnel connection early, because otherwise httplib
    would improperly set the Host: header to the proxy's IP:port.
    """
    try:
        set_tunnel = conn.set_tunnel
    except AttributeError:
        # Platform-specific: Python 2.6 names the method _set_tunnel.
        set_tunnel = conn._set_tunnel
    if sys.version_info <= (2, 6, 4) and not self.proxy_headers:
        # Python 2.6.4 and older don't accept a headers argument.
        set_tunnel(self.host, self.port)
    else:
        set_tunnel(self.host, self.port, self.proxy_headers)
    conn.connect()
def _new_conn(self):
    """Return a fresh :class:`httplib.HTTPSConnection`."""
    self.num_connections += 1
    log.info('Starting new HTTPS connection (%d): %s' % (self.num_connections, self.host))
    if not self.ConnectionCls or self.ConnectionCls is DummyConnection:
        raise SSLError("Can't connect to HTTPS URL because the SSL module is not available.")
    # When tunnelling through a proxy, the raw socket connects to the
    # proxy endpoint, not the origin host.
    actual_host = self.host
    actual_port = self.port
    if self.proxy is not None:
        actual_host = self.proxy.host
        actual_port = self.proxy.port
    conn = self.ConnectionCls(
        host=actual_host,
        port=actual_port,
        timeout=self.timeout.connect_timeout,
        strict=self.strict,
        **self.conn_kw)
    return self._prepare_conn(conn)
def _validate_conn(self, conn):
    """Called right before a request is made, after the socket is
    created.
    """
    super(HTTPSConnectionPool, self)._validate_conn(conn)
    # Force connect early to allow us to validate the connection.
    if not getattr(conn, 'sock', None):
        conn.connect()
    if not conn.is_verified:
        warnings.warn(
            'Unverified HTTPS request is being made. Adding certificate verification is strongly advised. See: https://urllib3.readthedocs.org/en/latest/security.html',
            InsecureRequestWarning)
def __init__(self, user, pw, authurl, *args, **kwargs):
    """authurl is a random URL on the server that is protected by NTLM.
    user is the Windows user, probably in the DOMAIN\\username format.
    pw is the password for the user.
    """
    super(NTLMConnectionPool, self).__init__(*args, **kwargs)
    self.authurl = authurl
    self.rawuser = user
    # Split 'DOMAIN\\user' into its two components.
    user_parts = user.split('\\', 1)
    self.domain = user_parts[0].upper()
    self.user = user_parts[1]
    self.pw = pw
def request(self, method, url, fields=None, headers=None, **urlopen_kw):
    """Make a request using :meth:`urlopen` with the appropriate encoding of
    ``fields`` based on the ``method`` used.

    Convenience entry point: dispatches to :meth:`request_encode_url` for
    methods that carry parameters in the URL (GET, HEAD, DELETE, ...), and
    to :meth:`request_encode_body` for those that carry them in the body.
    """
    method = method.upper()
    if method in self._encode_url_methods:
        dispatch = self.request_encode_url
    else:
        dispatch = self.request_encode_body
    return dispatch(method, url, fields=fields, headers=headers, **urlopen_kw)
def request_encode_url(self, method, url, fields=None, headers=None, **urlopen_kw):
    """Make a request using :meth:`urlopen` with the ``fields`` encoded in
    the url. This is useful for request methods like GET, HEAD, DELETE, etc.
    """
    if headers is None:
        headers = self.headers
    kw = {'headers': headers}
    kw.update(urlopen_kw)
    if fields:
        url = '%s?%s' % (url, urlencode(fields))
    return self.urlopen(method, url, **kw)
def request_encode_body(self, method, url, fields=None, headers=None,
                        encode_multipart=True, multipart_boundary=None,
                        **urlopen_kw):
    """Make a request using :meth:`urlopen` with the ``fields`` encoded in
    the body. This is useful for request methods like POST, PUT, PATCH, etc.

    When ``encode_multipart=True`` (default), the payload is encoded with
    :meth:`urllib3.filepost.encode_multipart_formdata`; otherwise
    ``urllib.urlencode`` is used with the
    'application/x-www-form-urlencoded' content type. Multipart encoding is
    required when posting files. ``fields`` may mix key/value strings and
    key/filetuple items, where a filetuple is ``(filename, data[, MIME type])``.
    Note that a supplied 'Content-Type' header is overwritten, since it
    depends on the dynamic boundary string (settable via
    ``multipart_boundary``).
    """
    if headers is None:
        headers = self.headers
    extra_kw = {'headers': {}}
    if fields:
        if 'body' in urlopen_kw:
            raise TypeError("request got values for both 'fields' and 'body', can only specify one.")
        if encode_multipart:
            body, content_type = encode_multipart_formdata(
                fields, boundary=multipart_boundary)
        else:
            body = urlencode(fields)
            content_type = 'application/x-www-form-urlencoded'
        extra_kw['body'] = body
        extra_kw['headers'] = {'Content-Type': content_type}
    extra_kw['headers'].update(headers)
    extra_kw.update(urlopen_kw)
    return self.urlopen(method, url, **extra_kw)
def __init__(self, *args, **kwargs):
    """Initialize RequestException with `request` and `response` objects."""
    response = kwargs.pop('response', None)
    request = kwargs.pop('request', None)
    self.response = response
    self.request = request
    # If no request was given, fall back to the one attached to the
    # response (when available).
    if response is not None and not self.request and hasattr(response, 'request'):
        self.request = self.response.request
    super(RequestException, self).__init__(*args, **kwargs)
@property
def path_url(self):
    """Build the path URL to use (path plus optional query string)."""
    parts = urlsplit(self.url)
    # A bare domain still needs a path component.
    path = parts.path or '/'
    if parts.query:
        return '%s?%s' % (path, parts.query)
    return path
@staticmethod
def _encode_params(data):
    """Encode parameters in a piece of data.

    Will successfully encode parameters when passed as a dict or a list of
    2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
    if parameters are supplied as a dict.
    """
    # Strings/bytes and file-like objects pass through untouched.
    if isinstance(data, (str, bytes)):
        return data
    if hasattr(data, 'read'):
        return data
    if not hasattr(data, '__iter__'):
        return data
    pairs = []
    for key, values in to_key_val_list(data):
        # A scalar value is treated as a one-element list.
        if isinstance(values, basestring) or not hasattr(values, '__iter__'):
            values = [values]
        for value in values:
            if value is not None:
                pairs.append(
                    (key.encode('utf-8') if isinstance(key, str) else key,
                     value.encode('utf-8') if isinstance(value, str) else value))
    return urlencode(pairs, doseq=True)
@staticmethod
def _encode_files(files, data):
    """Build the body for a multipart/form-data request.

    Will successfully encode files when passed as a dict or a list of
    2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
    if parameters are supplied as a dict.

    Returns a ``(body, content_type)`` pair.
    """
    if (not files):
        raise ValueError('Files must be provided.')
    elif isinstance(data, basestring):
        raise ValueError('Data must not be a string.')
    new_fields = []
    fields = to_key_val_list((data or {}))
    files = to_key_val_list((files or {}))
    # First, flatten the plain data fields into (name, value) pairs,
    # normalizing names to text and values to bytes.
    for (field, val) in fields:
        if (isinstance(val, basestring) or (not hasattr(val, '__iter__'))):
            val = [val]
        for v in val:
            if (v is not None):
                # Non-bytes values are stringified before encoding.
                if (not isinstance(v, bytes)):
                    v = str(v)
                new_fields.append(((field.decode('utf-8') if isinstance(field, bytes) else field), (v.encode('utf-8') if isinstance(v, str) else v)))
    # Then append each file as a RequestField. Accepted value shapes:
    # fp, (fn, fp), (fn, fp, ft) or (fn, fp, ft, fh) where fn=filename,
    # fp=file object or raw content, ft=content type, fh=extra headers.
    for (k, v) in files:
        ft = None
        fh = None
        if isinstance(v, (tuple, list)):
            if (len(v) == 2):
                (fn, fp) = v
            elif (len(v) == 3):
                (fn, fp, ft) = v
            else:
                (fn, fp, ft, fh) = v
        else:
            # Bare file object: guess a filename, falling back to the key.
            fn = (guess_filename(v) or k)
            fp = v
        # Raw string/bytes content is used as-is; file-likes are read fully.
        if isinstance(fp, (str, bytes, bytearray)):
            fdata = fp
        else:
            fdata = fp.read()
        rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)
        rf.make_multipart(content_type=ft)
        new_fields.append(rf)
    (body, content_type) = encode_multipart_formdata(new_fields)
    return (body, content_type)
def register_hook(self, event, hook):
    """Properly register a hook.

    ``hook`` may be a single callable or an iterable of callables;
    non-callable items in an iterable are silently skipped.

    :raises ValueError: if ``event`` is not a supported hook event.
    """
    if event not in self.hooks:
        raise ValueError('Unsupported event specified, with event name "%s"' % event)
    # Use the builtin callable() instead of collections.Callable, which was
    # an alias removed from the collections module in Python 3.10.
    if callable(hook):
        self.hooks[event].append(hook)
    elif hasattr(hook, '__iter__'):
        self.hooks[event].extend(h for h in hook if callable(h))
def deregister_hook(self, event, hook):
    """Deregister a previously registered hook.
    Returns True if the hook existed, False if not.
    """
    registered = self.hooks[event]
    if hook in registered:
        registered.remove(hook)
        return True
    return False
def prepare(self):
    """Constructs a :class:`PreparedRequest <PreparedRequest>` for
    transmission and returns it.
    """
    prepared = PreparedRequest()
    prepared.prepare(
        method=self.method,
        url=self.url,
        headers=self.headers,
        files=self.files,
        data=self.data,
        json=self.json,
        params=self.params,
        auth=self.auth,
        cookies=self.cookies,
        hooks=self.hooks,
    )
    return prepared
def prepare(self, method=None, url=None, headers=None, files=None, data=None, params=None, auth=None, cookies=None, hooks=None, json=None):
    """Prepares the entire request with the given parameters.

    The call order below matters: cookies must be prepared after headers
    (the Cookie header is written into self.headers), and auth runs after
    the body so the auth callable sees the final body and can recompute
    Content-Length (see prepare_auth). Hooks are prepared last.
    """
    self.prepare_method(method)
    self.prepare_url(url, params)
    self.prepare_headers(headers)
    self.prepare_cookies(cookies)
    self.prepare_body(data, files, json)
    self.prepare_auth(auth, url)
    self.prepare_hooks(hooks)
def prepare_method(self, method):
    """Prepares the given HTTP method (upper-cased, native string)."""
    if method is None:
        self.method = None
    else:
        self.method = to_native_string(method.upper())
def prepare_url(self, url, params):
    """Prepares the given HTTP URL: validates it, IDNA-encodes the host,
    merges ``params`` into the query string and stores the result on
    ``self.url``.
    """
    if isinstance(url, bytes):
        url = url.decode('utf8')
    else:
        url = (unicode(url) if is_py2 else str(url))
    # Don't do any URL preparation for non-HTTP-like schemes (mailto:,
    # data:, ...); pass them through untouched.
    if ((':' in url) and (not url.lower().startswith('http'))):
        self.url = url
        return
    try:
        (scheme, auth, host, port, path, query, fragment) = parse_url(url)
    except LocationParseError as e:
        raise InvalidURL(*e.args)
    if (not scheme):
        error = 'Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?'
        error = error.format(to_native_string(url, 'utf8'))
        raise MissingSchema(error)
    if (not host):
        raise InvalidURL(('Invalid URL %r: No host supplied' % url))
    # IDNA-encode non-ASCII hostnames.
    try:
        host = host.encode('idna').decode('utf-8')
    except UnicodeError:
        raise InvalidURL('URL has an invalid label.')
    # Carefully reconstruct the network location: [auth@]host[:port].
    netloc = (auth or '')
    if netloc:
        netloc += '@'
    netloc += host
    if port:
        netloc += (':' + str(port))
    # Bare domains aren't valid URLs.
    if (not path):
        path = '/'
    if is_py2:
        # Python 2's urlunparse wants byte-string components.
        if isinstance(scheme, str):
            scheme = scheme.encode('utf-8')
        if isinstance(netloc, str):
            netloc = netloc.encode('utf-8')
        if isinstance(path, str):
            path = path.encode('utf-8')
        if isinstance(query, str):
            query = query.encode('utf-8')
        if isinstance(fragment, str):
            fragment = fragment.encode('utf-8')
    if isinstance(params, (str, bytes)):
        params = to_native_string(params)
    # Merge extra params into any query string already present in the URL.
    enc_params = self._encode_params(params)
    if enc_params:
        if query:
            query = ('%s&%s' % (query, enc_params))
        else:
            query = enc_params
    url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))
    self.url = url
def prepare_headers(self, headers):
    """Prepares the given HTTP headers as a case-insensitive dict with
    native-string names.
    """
    if not headers:
        self.headers = CaseInsensitiveDict()
    else:
        items = ((to_native_string(name), value)
                 for name, value in headers.items())
        self.headers = CaseInsensitiveDict(items)
def prepare_body(self, data, files, json=None):
    """Prepares the given HTTP body data.

    Priority: a streamed (file-like/iterable) body wins over everything;
    otherwise files are multipart-encoded (together with data fields);
    otherwise data is form-encoded; a ``json`` payload is used only when
    no ``data`` was given.
    """
    body = None
    content_type = None
    length = None
    if ((not data) and (json is not None)):
        content_type = 'application/json'
        body = complexjson.dumps(json)
    # A "stream" is any iterable that isn't a string/list/tuple/dict.
    is_stream = all([hasattr(data, '__iter__'), (not isinstance(data, (basestring, list, tuple, dict)))])
    try:
        length = super_len(data)
    except (TypeError, AttributeError, UnsupportedOperation):
        length = None
    if is_stream:
        body = data
        if files:
            raise NotImplementedError('Streamed bodies and files are mutually exclusive.')
        if length:
            self.headers['Content-Length'] = builtin_str(length)
        else:
            # Unknown length: fall back to chunked transfer encoding.
            self.headers['Transfer-Encoding'] = 'chunked'
    else:
        if files:
            (body, content_type) = self._encode_files(files, data)
        elif data:
            body = self._encode_params(data)
            # Raw strings and file-likes carry no implied content type.
            if (isinstance(data, basestring) or hasattr(data, 'read')):
                content_type = None
            else:
                content_type = 'application/x-www-form-urlencoded'
        self.prepare_content_length(body)
        # Only set Content-Type if it wasn't explicitly provided.
        if (content_type and ('content-type' not in self.headers)):
            self.headers['Content-Type'] = content_type
    self.body = body
def prepare_auth(self, auth, url=''):
    """Prepares the given HTTP auth data."""
    if auth is None:
        # No explicit auth: fall back to credentials embedded in the URL.
        url_auth = get_auth_from_url(self.url)
        auth = url_auth if any(url_auth) else None
    if auth:
        # A (user, pass) 2-tuple is shorthand for HTTP Basic auth.
        if isinstance(auth, tuple) and len(auth) == 2:
            auth = HTTPBasicAuth(*auth)
        # Let the auth callable transform this request, then absorb
        # whatever it changed.
        modified = auth(self)
        self.__dict__.update(modified.__dict__)
        # The body may have been altered; recompute Content-Length.
        self.prepare_content_length(self.body)
def prepare_cookies(self, cookies):
    """Prepares the given HTTP cookie data.

    Generates a ``Cookie`` header from ``cookies`` using cookielib. Due to
    cookielib's design the header is not regenerated if it already exists,
    so this is effectively once-per-PreparedRequest unless the "Cookie"
    header is removed beforehand.
    """
    if isinstance(cookies, cookielib.CookieJar):
        self._cookies = cookies
    else:
        # Plain dicts (or None) are converted into a CookieJar.
        self._cookies = cookiejar_from_dict(cookies)
    header = get_cookie_header(self._cookies, self)
    if header is not None:
        self.headers['Cookie'] = header
def prepare_hooks(self, hooks):
    """Prepares the given hooks (a mapping of event name to hook(s))."""
    # hooks may be None; iterating an empty list is a no-op.
    for event in (hooks or []):
        self.register_hook(event, hooks[event])
def __bool__(self):
    """Returns true if :attr:`status_code` is 'OK' (Python 3 truthiness)."""
    return self.ok
def __nonzero__(self):
    """Returns true if :attr:`status_code` is 'OK' (Python 2 truthiness)."""
    return self.ok
def __iter__(self):
    """Allows you to use a response as an iterator (128-byte chunks)."""
    return self.iter_content(128)
@property
def is_redirect(self):
    """True if this Response is a well-formed HTTP redirect that could have
    been processed automatically (by :meth:`Session.resolve_redirects`).
    """
    if 'location' not in self.headers:
        return False
    return self.status_code in REDIRECT_STATI
@property
def is_permanent_redirect(self):
    """True if this Response is one of the permanent versions of redirect
    (301 Moved Permanently or 308 Permanent Redirect).
    """
    return ('location' in self.headers
            and self.status_code in (codes.moved_permanently,
                                     codes.permanent_redirect))
@property
def apparent_encoding(self):
    """The apparent encoding, provided by the chardet library.

    May be None if chardet cannot make a guess for this content.
    """
    return chardet.detect(self.content)['encoding']
def iter_content(self, chunk_size=1, decode_unicode=False):
    """Iterates over the response data. When stream=True is set on the
    request, this avoids reading the content at once into memory for large
    responses. The chunk size is the number of bytes it should read into
    memory. This is not necessarily the length of each item returned as
    decoding can take place. If decode_unicode is True, content will be
    decoded using the best available encoding based on the response.
    """
    def generate():
        # Special case for urllib3: its raw object exposes stream().
        if hasattr(self.raw, 'stream'):
            try:
                for chunk in self.raw.stream(chunk_size, decode_content=True):
                    (yield chunk)
            except ProtocolError as e:
                raise ChunkedEncodingError(e)
            except DecodeError as e:
                raise ContentDecodingError(e)
            except ReadTimeoutError as e:
                raise ConnectionError(e)
        else:
            # Standard file-like object: read in fixed-size chunks.
            while True:
                chunk = self.raw.read(chunk_size)
                if (not chunk):
                    break
                (yield chunk)
        self._content_consumed = True
    # _content is a bool (the False sentinel) only while unread; if the
    # stream was consumed without caching, there is nothing to iterate.
    if (self._content_consumed and isinstance(self._content, bool)):
        raise StreamConsumedError()
    # If content was already cached, replay it in chunk_size slices;
    # otherwise stream it from the raw object.
    reused_chunks = iter_slices(self._content, chunk_size)
    stream_chunks = generate()
    chunks = (reused_chunks if self._content_consumed else stream_chunks)
    if decode_unicode:
        chunks = stream_decode_response_unicode(chunks, self)
    return chunks
def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None, delimiter=None):
    """Iterates over the response data, one line at a time. When
    stream=True is set on the request, this avoids reading the content at
    once into memory for large responses.

    .. note:: This method is not reentrant safe.
    """
    # Tail of the previous chunk that didn't end on a line boundary.
    pending = None
    for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode):
        if (pending is not None):
            chunk = (pending + chunk)
        if delimiter:
            lines = chunk.split(delimiter)
        else:
            lines = chunk.splitlines()
        # If the last line shares its final character with the chunk, the
        # chunk did not end on a line boundary: carry that line forward.
        if (lines and lines[(-1)] and chunk and (lines[(-1)][(-1)] == chunk[(-1)])):
            pending = lines.pop()
        else:
            pending = None
        for line in lines:
            (yield line)
    # Flush whatever is left after the stream ends.
    if (pending is not None):
        (yield pending)
@property
def content(self):
    """Content of the response, in bytes.

    Read once from the stream and cached; the False value of _content is
    the "not read yet" sentinel.
    """
    if (self._content is False):
        try:
            if self._content_consumed:
                raise RuntimeError('The content for this response was already consumed')
            if (self.status_code == 0):
                # No real response (e.g. connection failure).
                self._content = None
            else:
                self._content = (bytes().join(self.iter_content(CONTENT_CHUNK_SIZE)) or bytes())
        except AttributeError:
            # Raw stream unavailable: treat as no content.
            self._content = None
    self._content_consumed = True
    return self._content
@property
def text(self):
    """Content of the response, in unicode.

    If Response.encoding is None, encoding will be guessed using
    ``chardet``. The encoding of the response content is determined based
    solely on HTTP headers, following RFC 2616 to the letter. If you can
    take advantage of non-HTTP knowledge to make a better guess at the
    encoding, you should set ``r.encoding`` appropriately before accessing
    this property.
    """
    if not self.content:
        return str('')
    encoding = self.encoding
    if encoding is None:
        # No charset declared in the headers: fall back to chardet.
        encoding = self.apparent_encoding
    # Decode with replacement on errors; if the codec name is bogus
    # (LookupError) or unusable (TypeError), decode with the default.
    try:
        return str(self.content, encoding, errors='replace')
    except (LookupError, TypeError):
        return str(self.content, errors='replace')
def json(self, **kwargs):
    """Returns the json-encoded content of a response, if any.

    :param kwargs: Optional arguments that ``json.loads`` takes.
    """
    if not self.encoding and len(self.content) > 3:
        # No declared encoding, but enough bytes to sniff a UTF
        # variant/BOM per RFC 4627 section 3.
        encoding = guess_json_utf(self.content)
        if encoding is not None:
            try:
                return complexjson.loads(self.content.decode(encoding), **kwargs)
            except UnicodeDecodeError:
                # Wrong guess: fall back to the text-based path below.
                pass
    return complexjson.loads(self.text, **kwargs)
@property
def links(self):
    """Returns the parsed header links of the response, if any.

    Keyed by each link's 'rel' value (or its URL when no rel is given).
    """
    header = self.headers.get('link')
    resolved = {}
    if header:
        for link in parse_header_links(header):
            key = link.get('rel') or link.get('url')
            resolved[key] = link
    return resolved
def raise_for_status(self):
    """Raises stored :class:`HTTPError`, if one occurred.

    4xx responses raise a Client Error, 5xx a Server Error; anything else
    is a no-op.
    """
    kind = ''
    if 400 <= self.status_code < 500:
        kind = 'Client Error'
    elif 500 <= self.status_code < 600:
        kind = 'Server Error'
    if kind:
        raise HTTPError('%s %s: %s for url: %s' % (self.status_code, kind, self.reason, self.url),
                        response=self)
def close(self):
    """Releases the connection back to the pool. Once this method has been
    called the underlying ``raw`` object must not be accessed again.

    *Note: Should not normally need to be called explicitly.*
    """
    if self._content_consumed:
        # Content fully read: the connection can be reused.
        return self.raw.release_conn()
    # Content unread: close the socket outright.
    return self.raw.close()
def handle_redirect(self, r, **kwargs):
    """Reset num_401_calls counter on redirects."""
    if not r.is_redirect:
        return
    self._thread_local.num_401_calls = 1
def handle_401(self, r, **kwargs):
    """Takes the given response and tries digest-auth, if needed.

    Retries the request at most once with an Authorization header built
    from the server's Digest challenge; returns the new response, or the
    original one if no retry is performed.
    """
    if (self._thread_local.pos is not None):
        # Rewind the body's file position so it can be resent.
        r.request.body.seek(self._thread_local.pos)
    s_auth = r.headers.get('www-authenticate', '')
    # Only retry once (num_401_calls < 2) and only for Digest challenges.
    if (('digest' in s_auth.lower()) and (self._thread_local.num_401_calls < 2)):
        self._thread_local.num_401_calls += 1
        pat = re.compile('digest ', flags=re.IGNORECASE)
        self._thread_local.chal = parse_dict_header(pat.sub('', s_auth, count=1))
        # Consume content and release the original connection so the
        # retried request can reuse it.
        r.content
        r.close()
        prep = r.request.copy()
        extract_cookies_to_jar(prep._cookies, r.request, r.raw)
        prep.prepare_cookies(prep._cookies)
        prep.headers['Authorization'] = self.build_digest_header(prep.method, prep.url)
        _r = r.connection.send(prep, **kwargs)
        _r.history.append(r)
        _r.request = prep
        return _r
    # Not a digest challenge, or already retried: reset and give up.
    self._thread_local.num_401_calls = 1
    return r
def resolve_redirects(self, resp, req, stream=False, timeout=None, verify=True, cert=None, proxies=None, **adapter_kwargs):
    """Receives a Response. Returns a generator of Responses.

    Follows the redirect chain starting at ``resp``, yielding each
    follow-up response, until a non-redirect response is received or
    ``self.max_redirects`` is exceeded.
    """
    i = 0
    hist = []  # accumulated redirect history
    while resp.is_redirect:
        prepared_request = req.copy()
        if (i > 0):
            # Update history and keep track of redirects.
            hist.append(resp)
            new_hist = list(hist)
            resp.history = new_hist
        # Consume the body so the connection can be released back to the
        # pool; fall back to a raw read when decoding fails.
        try:
            resp.content
        except (ChunkedEncodingError, ContentDecodingError, RuntimeError):
            resp.raw.read(decode_content=False)
        if (i >= self.max_redirects):
            raise TooManyRedirects(('Exceeded %s redirects.' % self.max_redirects))
        resp.close()
        url = resp.headers['location']
        method = req.method
        # Handle scheme-relative redirects ('//host/path'): reuse the
        # scheme of the current response URL.
        if url.startswith('//'):
            parsed_rurl = urlparse(resp.url)
            url = ('%s:%s' % (parsed_rurl.scheme, url))
        parsed = urlparse(url)
        url = parsed.geturl()
        # Support relative 'Location' headers by joining against the
        # response URL; absolute URLs are requoted as-is.
        if (not parsed.netloc):
            url = urljoin(resp.url, requote_uri(url))
        else:
            url = requote_uri(url)
        prepared_request.url = to_native_string(url)
        # Remember permanent redirects (unless self-referential) so later
        # requests can skip the round trip.
        if (resp.is_permanent_redirect and (req.url != prepared_request.url)):
            self.redirect_cache[req.url] = prepared_request.url
        # 303 See Other: everything except HEAD becomes GET.
        if ((resp.status_code == codes.see_other) and (method != 'HEAD')):
            method = 'GET'
        # 302 Found / 301 Moved: browsers downgrade to GET despite the
        # RFC, so do the same.
        if ((resp.status_code == codes.found) and (method != 'HEAD')):
            method = 'GET'
        if ((resp.status_code == codes.moved) and (method == 'POST')):
            method = 'GET'
        prepared_request.method = method
        # Unless the redirect preserves the method/body (307/308), drop
        # the body and its Content-Length.
        if (resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect)):
            if ('Content-Length' in prepared_request.headers):
                del prepared_request.headers['Content-Length']
            prepared_request.body = None
        # Strip the stale Cookie header; cookies are re-prepared below.
        headers = prepared_request.headers
        try:
            del headers['Cookie']
        except KeyError:
            pass
        # Extract cookies set by this response into the next request.
        extract_cookies_to_jar(prepared_request._cookies, req, resp.raw)
        prepared_request._cookies.update(self.cookies)
        prepared_request.prepare_cookies(prepared_request._cookies)
        # Rebuild proxy and auth information for the new destination.
        proxies = self.rebuild_proxies(prepared_request, proxies)
        self.rebuild_auth(prepared_request, resp)
        # Override the original request and follow the redirect (with
        # allow_redirects=False so this generator stays in control).
        req = prepared_request
        resp = self.send(req, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies, allow_redirects=False, **adapter_kwargs)
        extract_cookies_to_jar(self.cookies, prepared_request, resp.raw)
        i += 1
        (yield resp)