'Send a given PreparedRequest. :rtype: requests.Response'
def send(self, request, **kwargs):
    kwargs.setdefault('stream', self.stream)
    kwargs.setdefault('verify', self.verify)
    kwargs.setdefault('cert', self.cert)
    kwargs.setdefault('proxies', self.proxies)
    if isinstance(request, Request):
        raise ValueError('You can only send PreparedRequests.')
    allow_redirects = kwargs.pop('allow_redirects', True)
    stream = kwargs.get('stream')
    hooks = request.hooks
    adapter = self.get_adapter(url=request.url)
    start = preferred_clock()
    r = adapter.send(request, **kwargs)
    elapsed = preferred_clock() - start
    r.elapsed = timedelta(seconds=elapsed)
    r = dispatch_hook('response', hooks, r, **kwargs)
    if r.history:
        for resp in r.history:
            extract_cookies_to_jar(self.cookies, resp.request, resp.raw)
    extract_cookies_to_jar(self.cookies, request, r.raw)
    gen = self.resolve_redirects(r, request, **kwargs)
    history = [resp for resp in gen] if allow_redirects else []
    if history:
        history.insert(0, r)
        r = history.pop()
        r.history = history
    if not allow_redirects:
        try:
            r._next = next(self.resolve_redirects(r, request,
                                                  yield_requests=True, **kwargs))
        except StopIteration:
            pass
    if not stream:
        r.content
    return r
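A minimal usage sketch of this send() path; the convenience methods (get, post, and friends) ultimately funnel into it, and the URL and timeout below are illustrative:

import requests

s = requests.Session()
req = requests.Request('GET', 'https://httpbin.org/get')
prepped = s.prepare_request(req)  # send() rejects plain Request objects
resp = s.send(prepped, timeout=5, allow_redirects=True)
print(resp.status_code, resp.elapsed)  # elapsed comes from the clock delta around adapter.send()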
'Check the environment and merge it with some settings. :rtype: dict'
def merge_environment_settings(self, url, proxies, stream, verify, cert):
    if self.trust_env:
        no_proxy = proxies.get('no_proxy') if proxies is not None else None
        env_proxies = get_environ_proxies(url, no_proxy=no_proxy)
        for (k, v) in env_proxies.items():
            proxies.setdefault(k, v)
        if verify is True or verify is None:
            verify = (os.environ.get('REQUESTS_CA_BUNDLE') or
                      os.environ.get('CURL_CA_BUNDLE'))
    proxies = merge_setting(proxies, self.proxies)
    stream = merge_setting(stream, self.stream)
    verify = merge_setting(verify, self.verify)
    cert = merge_setting(cert, self.cert)
    return {'verify': verify, 'proxies': proxies, 'stream': stream, 'cert': cert}
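A short sketch of how the environment feeds into this merge when trust_env is enabled (the proxy host and bundle path are illustrative):

import os
import requests

os.environ['HTTPS_PROXY'] = 'http://proxy.example.com:3128'
os.environ['REQUESTS_CA_BUNDLE'] = '/etc/ssl/certs/ca-bundle.crt'
s = requests.Session()
settings = s.merge_environment_settings('https://example.com/', {}, None, True, None)
# verify=True is replaced by the REQUESTS_CA_BUNDLE path, and the environment
# proxy is folded into the proxies dict before the session-level merge.
print(settings['verify'], settings['proxies'])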
'Returns the appropriate connection adapter for the given URL. :rtype: requests.adapters.BaseAdapter'
def get_adapter(self, url):
    for (prefix, adapter) in self.adapters.items():
        if url.lower().startswith(prefix):
            return adapter
    raise InvalidSchema("No connection adapters were found for '%s'" % url)
'Closes all adapters and, as such, the session.'
def close(self):
    for v in self.adapters.values():
        v.close()
'Registers a connection adapter to a prefix. Adapters are sorted in descending order by prefix length.'
def mount(self, prefix, adapter):
    self.adapters[prefix] = adapter
    keys_to_move = [k for k in self.adapters if len(k) < len(prefix)]
    for key in keys_to_move:
        self.adapters[key] = self.adapters.pop(key)
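A usage sketch of mount() and the prefix ordering it maintains (the host and retry count are illustrative):

import requests

s = requests.Session()
adapter = requests.adapters.HTTPAdapter(max_retries=3)
s.mount('https://api.example.com/', adapter)
# get_adapter() scans prefixes in dict order; mount() re-inserts every shorter
# prefix after the new one, so the longest matching prefix wins the lookup.
assert s.get_adapter('https://api.example.com/v1/users') is adapter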
'Like iteritems(), but with all lowercase keys.'
def lower_items(self):
    return ((lowerkey, keyval[1]) for (lowerkey, keyval) in self._store.items())
'Sends PreparedRequest object. Returns Response object. :param request: The :class:`PreparedRequest <PreparedRequest>` being sent. :param stream: (optional) Whether to stream the request content. :param timeout: (optional) How long to wait for the server to send data before giving up, as a float, or a :ref:`(connect timeout, read timeout) <timeouts>` tuple. :type timeout: float or tuple :param verify: (optional) Either a boolean, in which case it controls whether we verify the server\'s TLS certificate, or a string, in which case it must be a path to a CA bundle to use :param cert: (optional) Any user-provided SSL certificate to be trusted. :param proxies: (optional) The proxies dictionary to apply to the request.'
def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
    raise NotImplementedError
'Cleans up adapter specific items.'
def close(self):
    raise NotImplementedError
'Initializes a urllib3 PoolManager. This method should not be called from user code, and is only exposed for use when subclassing the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. :param connections: The number of urllib3 connection pools to cache. :param maxsize: The maximum number of connections to save in the pool. :param block: Block when no free connections are available. :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.'
def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):
    self._pool_connections = connections
    self._pool_maxsize = maxsize
    self._pool_block = block
    self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
                                   block=block, strict=True, **pool_kwargs)
'Return urllib3 ProxyManager for the given proxy. This method should not be called from user code, and is only exposed for use when subclassing the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. :param proxy: The proxy to return a urllib3 ProxyManager for. :param proxy_kwargs: Extra keyword arguments used to configure the Proxy Manager. :returns: ProxyManager :rtype: urllib3.ProxyManager'
def proxy_manager_for(self, proxy, **proxy_kwargs):
    if proxy in self.proxy_manager:
        manager = self.proxy_manager[proxy]
    elif proxy.lower().startswith('socks'):
        (username, password) = get_auth_from_url(proxy)
        manager = self.proxy_manager[proxy] = SOCKSProxyManager(
            proxy,
            username=username,
            password=password,
            num_pools=self._pool_connections,
            maxsize=self._pool_maxsize,
            block=self._pool_block,
            **proxy_kwargs
        )
    else:
        proxy_headers = self.proxy_headers(proxy)
        manager = self.proxy_manager[proxy] = proxy_from_url(
            proxy,
            proxy_headers=proxy_headers,
            num_pools=self._pool_connections,
            maxsize=self._pool_maxsize,
            block=self._pool_block,
            **proxy_kwargs
        )
    return manager
'Verify an SSL certificate. This method should not be called from user code, and is only exposed for use when subclassing the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. :param conn: The urllib3 connection object associated with the cert. :param url: The requested URL. :param verify: Either a boolean, in which case it controls whether we verify the server\'s TLS certificate, or a string, in which case it must be a path to a CA bundle to use :param cert: The SSL certificate to verify.'
def cert_verify(self, conn, url, verify, cert):
    if url.lower().startswith('https') and verify:
        cert_loc = None
        if verify is not True:
            cert_loc = verify
        if not cert_loc:
            cert_loc = DEFAULT_CA_BUNDLE_PATH
        if not cert_loc or not os.path.exists(cert_loc):
            raise IOError('Could not find a suitable TLS CA certificate bundle, '
                          'invalid path: {0}'.format(cert_loc))
        conn.cert_reqs = 'CERT_REQUIRED'
        if not os.path.isdir(cert_loc):
            conn.ca_certs = cert_loc
        else:
            conn.ca_cert_dir = cert_loc
    else:
        conn.cert_reqs = 'CERT_NONE'
        conn.ca_certs = None
        conn.ca_cert_dir = None
    if cert:
        if not isinstance(cert, basestring):
            conn.cert_file = cert[0]
            conn.key_file = cert[1]
        else:
            conn.cert_file = cert
            conn.key_file = None
        if conn.cert_file and not os.path.exists(conn.cert_file):
            raise IOError('Could not find the TLS certificate file, '
                          'invalid path: {0}'.format(conn.cert_file))
        if conn.key_file and not os.path.exists(conn.key_file):
            raise IOError('Could not find the TLS key file, '
                          'invalid path: {0}'.format(conn.key_file))
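For reference, the verify/cert shapes this method maps onto the connection, as seen from the public API (all paths are illustrative):

import requests

requests.get('https://example.com/', verify=True)                             # default CA bundle
requests.get('https://example.com/', verify='/etc/ssl/certs/ca-bundle.crt')   # custom bundle file or dir
requests.get('https://example.com/', cert='/path/client.pem')                 # combined cert+key file
requests.get('https://example.com/', cert=('/path/client.cert', '/path/client.key'))  # (cert, key) pair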
'Builds a :class:`Response <requests.Response>` object from a urllib3 response. This should not be called from user code, and is only exposed for use when subclassing the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>` :param req: The :class:`PreparedRequest <PreparedRequest>` used to generate the response. :param resp: The urllib3 response object. :rtype: requests.Response'
def build_response(self, req, resp):
    response = Response()
    response.status_code = getattr(resp, 'status', None)
    response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))
    response.encoding = get_encoding_from_headers(response.headers)
    response.raw = resp
    response.reason = response.raw.reason
    if isinstance(req.url, bytes):
        response.url = req.url.decode('utf-8')
    else:
        response.url = req.url
    extract_cookies_to_jar(response.cookies, req, resp)
    response.request = req
    response.connection = self
    return response
'Returns a urllib3 connection for the given URL. This should not be called from user code, and is only exposed for use when subclassing the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. :param url: The URL to connect to. :param proxies: (optional) A Requests-style dictionary of proxies used on this request. :rtype: urllib3.ConnectionPool'
def get_connection(self, url, proxies=None):
    proxy = select_proxy(url, proxies)
    if proxy:
        proxy = prepend_scheme_if_needed(proxy, 'http')
        proxy_manager = self.proxy_manager_for(proxy)
        conn = proxy_manager.connection_from_url(url)
    else:
        parsed = urlparse(url)
        url = parsed.geturl()
        conn = self.poolmanager.connection_from_url(url)
    return conn
'Disposes of any internal state. Currently, this closes the PoolManager and any active ProxyManager, which closes any pooled connections.'
def close(self):
    self.poolmanager.clear()
    for proxy in self.proxy_manager.values():
        proxy.clear()
'Obtain the URL to use when making the final request. If the message is being sent through an HTTP proxy, the full URL has to be used. Otherwise, we should only use the path portion of the URL. This should not be called from user code, and is only exposed for use when subclassing the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. :param request: The :class:`PreparedRequest <PreparedRequest>` being sent. :param proxies: A dictionary of schemes or schemes and hosts to proxy URLs. :rtype: str'
def request_url(self, request, proxies):
    proxy = select_proxy(request.url, proxies)
    scheme = urlparse(request.url).scheme
    is_proxied_http_request = proxy and scheme != 'https'
    using_socks_proxy = False
    if proxy:
        proxy_scheme = urlparse(proxy).scheme.lower()
        using_socks_proxy = proxy_scheme.startswith('socks')
    url = request.path_url
    if is_proxied_http_request and not using_socks_proxy:
        url = urldefragauth(request.url)
    return url
'Add any headers needed by the connection. As of v2.0 this does nothing by default, but is left for overriding by users that subclass the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. This should not be called from user code, and is only exposed for use when subclassing the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. :param request: The :class:`PreparedRequest <PreparedRequest>` to add headers to. :param kwargs: The keyword arguments from the call to send().'
def add_headers(self, request, **kwargs):
    pass
'Returns a dictionary of the headers to add to any request sent through a proxy. This works with urllib3 magic to ensure that they are correctly sent to the proxy, rather than in a tunnelled request if CONNECT is being used. This should not be called from user code, and is only exposed for use when subclassing the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. :param proxies: The url of the proxy being used for this request. :rtype: dict'
def proxy_headers(self, proxy):
    headers = {}
    (username, password) = get_auth_from_url(proxy)
    if username:
        headers['Proxy-Authorization'] = _basic_auth_str(username, password)
    return headers
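A sketch of where these headers come from in practice: credentials embedded in the proxy URL (host and credentials are illustrative) end up as a Proxy-Authorization header on the proxied request.

import requests

proxies = {'http': 'http://user:[email protected]:3128'}
# get_auth_from_url() extracts ('user', 'secret'); _basic_auth_str()
# base64-encodes the pair into the Proxy-Authorization value.
requests.get('http://example.com/', proxies=proxies)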
'Sends PreparedRequest object. Returns Response object. :param request: The :class:`PreparedRequest <PreparedRequest>` being sent. :param stream: (optional) Whether to stream the request content. :param timeout: (optional) How long to wait for the server to send data before giving up, as a float, or a :ref:`(connect timeout, read timeout) <timeouts>` tuple. :type timeout: float or tuple or urllib3 Timeout object :param verify: (optional) Either a boolean, in which case it controls whether we verify the server\'s TLS certificate, or a string, in which case it must be a path to a CA bundle to use :param cert: (optional) Any user-provided SSL certificate to be trusted. :param proxies: (optional) The proxies dictionary to apply to the request. :rtype: requests.Response'
def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
    conn = self.get_connection(request.url, proxies)
    self.cert_verify(conn, request.url, verify, cert)
    url = self.request_url(request, proxies)
    self.add_headers(request)
    chunked = not (request.body is None or 'Content-Length' in request.headers)
    if isinstance(timeout, tuple):
        try:
            (connect, read) = timeout
            timeout = TimeoutSauce(connect=connect, read=read)
        except ValueError as e:
            err = ('Invalid timeout {0}. Pass a (connect, read) timeout tuple, or '
                   'a single float to set both timeouts to the same value'.format(timeout))
            raise ValueError(err)
    elif isinstance(timeout, TimeoutSauce):
        pass
    else:
        timeout = TimeoutSauce(connect=timeout, read=timeout)
    try:
        if not chunked:
            resp = conn.urlopen(
                method=request.method,
                url=url,
                body=request.body,
                headers=request.headers,
                redirect=False,
                assert_same_host=False,
                preload_content=False,
                decode_content=False,
                retries=self.max_retries,
                timeout=timeout
            )
        else:
            if hasattr(conn, 'proxy_pool'):
                conn = conn.proxy_pool
            low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT)
            try:
                low_conn.putrequest(request.method, url, skip_accept_encoding=True)
                for (header, value) in request.headers.items():
                    low_conn.putheader(header, value)
                low_conn.endheaders()
                for i in request.body:
                    low_conn.send(hex(len(i))[2:].encode('utf-8'))
                    low_conn.send('\r\n')
                    low_conn.send(i)
                    low_conn.send('\r\n')
                low_conn.send('0\r\n\r\n')
                try:
                    r = low_conn.getresponse(buffering=True)
                except TypeError:
                    r = low_conn.getresponse()
                resp = HTTPResponse.from_httplib(r, pool=conn, connection=low_conn,
                                                 preload_content=False,
                                                 decode_content=False)
            except:
                low_conn.close()
                raise
    except (ProtocolError, socket.error) as err:
        raise ConnectionError(err, request=request)
    except MaxRetryError as e:
        if isinstance(e.reason, ConnectTimeoutError):
            if not isinstance(e.reason, NewConnectionError):
                raise ConnectTimeout(e, request=request)
        if isinstance(e.reason, ResponseError):
            raise RetryError(e, request=request)
        if isinstance(e.reason, _ProxyError):
            raise ProxyError(e, request=request)
        if isinstance(e.reason, _SSLError):
            raise SSLError(e, request=request)
        raise ConnectionError(e, request=request)
    except ClosedPoolError as e:
        raise ConnectionError(e, request=request)
    except _ProxyError as e:
        raise ProxyError(e)
    except (_SSLError, _HTTPError) as e:
        if isinstance(e, _SSLError):
            raise SSLError(e, request=request)
        elif isinstance(e, ReadTimeoutError):
            raise ReadTimeout(e, request=request)
        else:
            raise
    return self.build_response(request, resp)
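The three timeout shapes normalized near the top of send(), as seen from the public API (values illustrative):

import requests

requests.get('https://example.com/', timeout=5)           # one value for both connect and read
requests.get('https://example.com/', timeout=(3.05, 27))  # (connect, read) tuple
requests.get('https://example.com/', timeout=None)        # block until the server responds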
'Tests resolve_redirect doesn\'t fail when merging cookies with non-RequestsCookieJar cookiejar. See GH #3579'
def test_cookielib_cookiejar_on_redirect(self, httpbin):
    cj = cookiejar_from_dict({'foo': 'bar'}, cookielib.CookieJar())
    s = requests.Session()
    s.cookies = cookiejar_from_dict({'cookie': 'tasty'})
    req = requests.Request('GET', httpbin('headers'), cookies=cj)
    prep_req = req.prepare()
    resp = s.send(prep_req)
    resp.status_code = 302
    resp.headers['location'] = httpbin('get')
    redirects = s.resolve_redirects(resp, prep_req)
    resp = next(redirects)
    assert isinstance(prep_req._cookies, cookielib.CookieJar)
    assert isinstance(resp.request._cookies, cookielib.CookieJar)
    assert not isinstance(resp.request._cookies, requests.cookies.RequestsCookieJar)
    cookies = {}
    for c in resp.request._cookies:
        cookies[c.name] = c.value
    assert cookies['foo'] == 'bar'
    assert cookies['cookie'] == 'tasty'
'Show that even with redirects, Response.history is always a list.'
def test_history_is_always_a_list(self, httpbin):
    resp = requests.get(httpbin('get'))
    assert isinstance(resp.history, list)
    resp = requests.get(httpbin('redirect/1'))
    assert isinstance(resp.history, list)
    assert not isinstance(resp.history, tuple)
'Do not send headers in Session.headers with None values.'
def test_headers_on_session_with_None_are_not_sent(self, httpbin):
    ses = requests.Session()
    ses.headers['Accept-Encoding'] = None
    req = requests.Request('GET', httpbin('get'))
    prep = ses.prepare_request(req)
    assert 'Accept-Encoding' not in prep.headers
'Preserve order when headers provided as OrderedDict.'
def test_headers_preserve_order(self, httpbin):
    ses = requests.Session()
    ses.headers = OrderedDict()
    ses.headers['Accept-Encoding'] = 'identity'
    ses.headers['First'] = '1'
    ses.headers['Second'] = '2'
    headers = OrderedDict([('Third', '3'), ('Fourth', '4')])
    headers['Fifth'] = '5'
    headers['Second'] = '222'
    req = requests.Request('GET', httpbin('get'), headers=headers)
    prep = ses.prepare_request(req)
    items = list(prep.headers.items())
    assert items[0] == ('Accept-Encoding', 'identity')
    assert items[1] == ('First', '1')
    assert items[2] == ('Second', '222')
    assert items[3] == ('Third', '3')
    assert items[4] == ('Fourth', '4')
    assert items[5] == ('Fifth', '5')
'Ensure b\'test\' formats as the byte string "test" rather than the unicode string "b\'test\'" in Python 3.'
def test_basicauth_encodes_byte_strings(self):
    auth = ('\xc5\xafsername', 'test\xc6\xb6')
    r = requests.Request('GET', 'http://localhost', auth=auth)
    p = r.prepare()
    assert p.headers['Authorization'] == 'Basic xa9zZXJuYW1lOnRlc3TGtg=='
'warnings are emitted with requests.get'
def test_https_warnings(self, httpbin_secure, httpbin_ca_bundle):
    if HAS_MODERN_SSL or HAS_PYOPENSSL:
        warnings_expected = ('SubjectAltNameWarning',)
    else:
        warnings_expected = ('SNIMissingWarning',
                             'InsecurePlatformWarning',
                             'SubjectAltNameWarning')
    with pytest.warns(None) as warning_records:
        warnings.simplefilter('always')
        requests.get(httpbin_secure('status', '200'), verify=httpbin_ca_bundle)
    warning_records = [item for item in warning_records
                       if item.category.__name__ != 'ResourceWarning']
    warnings_category = tuple(item.category.__name__ for item in warning_records)
    assert warnings_category == warnings_expected
'When underlying SSL problems occur, an SSLError is raised.'
def test_certificate_failure(self, httpbin_secure):
    with pytest.raises(SSLError):
        requests.get(httpbin_secure('status', '200'))
'When called with decode_unicode, Response.iter_content should always return unicode.'
def test_response_decode_unicode(self):
    r = requests.Response()
    r._content_consumed = True
    r._content = 'the content'
    r.encoding = 'ascii'
    chunks = r.iter_content(decode_unicode=True)
    assert all(isinstance(chunk, str) for chunk in chunks)
    r = requests.Response()
    r.raw = io.BytesIO('the content')
    r.encoding = 'ascii'
    chunks = r.iter_content(decode_unicode=True)
    assert all(isinstance(chunk, str) for chunk in chunks)
'Ensure that chunk_size is passed as None or an integer, otherwise raise a TypeError.'
def test_response_chunk_size_type(self):
    r = requests.Response()
    r.raw = io.BytesIO('the content')
    chunks = r.iter_content(1)
    assert all(len(chunk) == 1 for chunk in chunks)
    r = requests.Response()
    r.raw = io.BytesIO('the content')
    chunks = r.iter_content(None)
    assert list(chunks) == ['the content']
    r = requests.Response()
    r.raw = io.BytesIO('the content')
    with pytest.raises(TypeError):
        chunks = r.iter_content('1024')
'Ensure that header updates are done case-insensitively.'
def test_fixes_1329(self, httpbin):
    s = requests.Session()
    s.headers.update({'ACCEPT': 'BOGUS'})
    s.headers.update({'accept': 'application/json'})
    r = s.get(httpbin('get'))
    headers = r.request.headers
    assert headers['accept'] == 'application/json'
    assert headers['Accept'] == 'application/json'
    assert headers['ACCEPT'] == 'application/json'
'Ensure prepare_headers regex isn\'t flagging valid header contents.'
def test_header_validation(self, httpbin):
    headers_ok = {'foo': 'bar baz qux',
                  'bar': u'fbbq'.encode('utf8'),
                  'baz': '',
                  'qux': '1'}
    r = requests.get(httpbin('get'), headers=headers_ok)
    assert r.request.headers['foo'] == headers_ok['foo']
'Ensure the header value is of type string or bytes as per discussion in GH issue #3386'
def test_header_value_not_str(self, httpbin):
    headers_int = {'foo': 3}
    headers_dict = {'bar': {'foo': 'bar'}}
    headers_list = {'baz': ['foo', 'bar']}
    with pytest.raises(InvalidHeader):
        r = requests.get(httpbin('get'), headers=headers_int)
    with pytest.raises(InvalidHeader):
        r = requests.get(httpbin('get'), headers=headers_dict)
    with pytest.raises(InvalidHeader):
        r = requests.get(httpbin('get'), headers=headers_list)
'Ensure that a header containing return character sequences raises an exception. Otherwise, multiple headers would be created from a single string.'
def test_header_no_return_chars(self, httpbin):
    headers_ret = {'foo': 'bar\r\nbaz: qux'}
    headers_lf = {'foo': 'bar\nbaz: qux'}
    headers_cr = {'foo': 'bar\rbaz: qux'}
    with pytest.raises(InvalidHeader):
        r = requests.get(httpbin('get'), headers=headers_ret)
    with pytest.raises(InvalidHeader):
        r = requests.get(httpbin('get'), headers=headers_lf)
    with pytest.raises(InvalidHeader):
        r = requests.get(httpbin('get'), headers=headers_cr)
'Ensure headers containing leading whitespace raise an InvalidHeader error before sending.'
def test_header_no_leading_space(self, httpbin):
    headers_space = {'foo': ' bar'}
    headers_tab = {'foo': '\tbar'}
    with pytest.raises(InvalidHeader):
        r = requests.get(httpbin('get'), headers=headers_space)
    with pytest.raises(InvalidHeader):
        r = requests.get(httpbin('get'), headers=headers_tab)
'Response.iter_lines() is not reentrant safe'
@pytest.mark.xfail
def test_response_iter_lines_reentrant(self, httpbin):
    r = requests.get(httpbin('stream/4'), stream=True)
    assert r.status_code == 200
    next(r.iter_lines())
    assert len(list(r.iter_lines())) == 3
'Test `close` call for non-urllib3-like raw objects. Should work when `release_conn` attr doesn\'t exist on `response.raw`.'
def test_response_without_release_conn(self):
    resp = requests.Response()
    resp.raw = StringIO.StringIO('test')
    assert not resp.raw.closed
    resp.close()
    assert resp.raw.closed
'Ensure that a byte stream with size 0 will not set both a Content-Length and Transfer-Encoding header.'
def test_empty_stream_with_auth_does_not_set_content_length_header(self, httpbin):
    auth = ('user', 'pass')
    url = httpbin('post')
    file_obj = io.BytesIO('')
    r = requests.Request('POST', url, auth=auth, data=file_obj)
    prepared_request = r.prepare()
    assert 'Transfer-Encoding' in prepared_request.headers
    assert 'Content-Length' not in prepared_request.headers
'Ensure that a byte stream with size > 0 will not set both a Content-Length and Transfer-Encoding header.'
def test_stream_with_auth_does_not_set_transfer_encoding_header(self, httpbin):
    auth = ('user', 'pass')
    url = httpbin('post')
    file_obj = io.BytesIO('test data')
    r = requests.Request('POST', url, auth=auth, data=file_obj)
    prepared_request = r.prepare()
    assert 'Transfer-Encoding' not in prepared_request.headers
    assert 'Content-Length' in prepared_request.headers
'Ensure that requests with a generator body are streamed with Transfer-Encoding: chunked rather than a Content-Length header.'
def test_chunked_upload_does_not_set_content_length_header(self, httpbin):
    data = (i for i in ['a', 'b', 'c'])
    url = httpbin('post')
    r = requests.Request('POST', url, data=data)
    prepared_request = r.prepare()
    assert 'Transfer-Encoding' in prepared_request.headers
    assert 'Content-Length' not in prepared_request.headers
'Tests a custom mixin to overwrite ``get_redirect_target``. Ensures a subclassed ``requests.Session`` can handle a certain type of malformed redirect responses. 1. original request receives a proper response: 302 redirect 2. following the redirect, a malformed response is given: status code = HTTP 200 location = alternate url 3. the custom session catches the edge case and follows the redirect'
def test_custom_redirect_mixin(self, httpbin):
    url_final = httpbin('html')
    querystring_malformed = urlencode({'location': url_final})
    url_redirect_malformed = httpbin('response-headers?%s' % querystring_malformed)
    querystring_redirect = urlencode({'url': url_redirect_malformed})
    url_redirect = httpbin('redirect-to?%s' % querystring_redirect)
    urls_test = [url_redirect, url_redirect_malformed, url_final]

    class CustomRedirectSession(requests.Session):
        def get_redirect_target(self, resp):
            if resp.is_redirect:
                return resp.headers['location']
            location = resp.headers.get('location')
            if location and location != resp.url:
                return location
            return None

    session = CustomRedirectSession()
    r = session.get(urls_test[0])
    assert len(r.history) == 2
    assert r.status_code == 200
    assert r.history[0].status_code == 302
    assert r.history[0].is_redirect
    assert r.history[1].status_code == 200
    assert not r.history[1].is_redirect
    assert r.url == urls_test[2]
'__setitem__ should behave case-insensitively.'
def test_fixes_649(self):
    cid = CaseInsensitiveDict()
    cid['spam'] = 'oneval'
    cid['Spam'] = 'twoval'
    cid['sPAM'] = 'redval'
    cid['SPAM'] = 'blueval'
    assert cid['spam'] == 'blueval'
    assert cid['SPAM'] == 'blueval'
    assert list(cid.keys()) == ['SPAM']
'Test case where we convert expires from string time.'
def test_expires_valid_str(self):
    morsel = Morsel()
    morsel['expires'] = 'Thu, 01-Jan-1970 00:00:01 GMT'
    cookie = morsel_to_cookie(morsel)
    assert cookie.expires == 1
'Test case where an invalid type is passed for expires.'
@pytest.mark.parametrize('value, exception', (
    (100, TypeError),
    ('woops', ValueError),
))
def test_expires_invalid_int(self, value, exception):
    morsel = Morsel()
    morsel['expires'] = value
    with pytest.raises(exception):
        morsel_to_cookie(morsel)
'Test case where expires is None.'
def test_expires_none(self):
    morsel = Morsel()
    morsel['expires'] = None
    cookie = morsel_to_cookie(morsel)
    assert cookie.expires is None
'Test case where a valid max age in seconds is passed.'
def test_max_age_valid_int(self):
    morsel = Morsel()
    morsel['max-age'] = 60
    cookie = morsel_to_cookie(morsel)
    assert isinstance(cookie.expires, int)
'Test case where an invalid max age is passed.'
def test_max_age_invalid_str(self):
    morsel = Morsel()
    morsel['max-age'] = 'woops'
    with pytest.raises(TypeError):
        morsel_to_cookie(morsel)
'Check that you can set None as a valid timeout value. To actually test this behavior, we\'d want to check that setting the timeout to None actually lets the request block past the system default timeout. However, this would make the test suite unbearably slow. Instead we verify that setting the timeout to None does not prevent the request from succeeding.'
@pytest.mark.parametrize('timeout', (None, Urllib3Timeout(connect=None, read=None)))
def test_none_timeout(self, httpbin, timeout):
    r = requests.get(httpbin('get'), timeout=timeout)
    assert r.status_code == 200
'See: https://github.com/requests/requests/issues/2316'
def test_encoded_methods(self, httpbin):
    r = requests.request('GET', httpbin('get'))
    assert r.ok
'This test validates that we correctly exclude some URLs from preparation, and that we handle others. Specifically, it tests that any URL whose scheme doesn\'t begin with "http" is left alone, and those whose scheme *does* begin with "http" are mutated.'
@pytest.mark.parametrize('input, expected', (
    ('http+unix://%2Fvar%2Frun%2Fsocket/path%7E', u'http+unix://%2Fvar%2Frun%2Fsocket/path~'),
    (u'http+unix://%2Fvar%2Frun%2Fsocket/path%7E', u'http+unix://%2Fvar%2Frun%2Fsocket/path~'),
    ('mailto:[email protected]', u'mailto:[email protected]'),
    (u'mailto:[email protected]', u'mailto:[email protected]'),
    ('data:SSDimaUgUHl0aG9uIQ==', u'data:SSDimaUgUHl0aG9uIQ=='),
))
def test_url_mutation(self, input, expected):
    r = requests.Request('GET', url=input)
    p = r.prepare()
    assert p.url == expected
'Setting parameters for nonstandard schemes is allowed if those schemes begin with "http", and is forbidden otherwise.'
@pytest.mark.parametrize('input, params, expected', (
    ('http+unix://%2Fvar%2Frun%2Fsocket/path', {'key': 'value'}, u'http+unix://%2Fvar%2Frun%2Fsocket/path?key=value'),
    (u'http+unix://%2Fvar%2Frun%2Fsocket/path', {'key': 'value'}, u'http+unix://%2Fvar%2Frun%2Fsocket/path?key=value'),
    ('mailto:[email protected]', {'key': 'value'}, u'mailto:[email protected]'),
    (u'mailto:[email protected]', {'key': 'value'}, u'mailto:[email protected]'),
))
def test_parameters_for_nonstandard_schemes(self, input, params, expected):
    r = requests.Request('GET', url=input, params=params)
    p = r.prepare()
    assert p.url == expected
'CaseInsensitiveDict instance with "Accept" header.'
@pytest.fixture(autouse=True)
def setup(self):
    self.case_insensitive_dict = CaseInsensitiveDict()
    self.case_insensitive_dict['Accept'] = 'application/json'
'LookupDict instance with "bad_gateway" attribute.'
@pytest.fixture(autouse=True)
def setup(self):
    self.lookup_dict = LookupDict('test')
    self.lookup_dict.bad_gateway = 502
'Ensures that we properly deal with different kinds of IO streams.'
@pytest.mark.parametrize('stream, value', (
    (StringIO.StringIO, 'Test'),
    (BytesIO, 'Test'),
    pytest.mark.skipif('cStringIO is None')((cStringIO, 'Test')),
))
def test_io_streams(self, stream, value):
    assert super_len(stream()) == 0
    assert super_len(stream(value)) == 4
'Ensure that we handle partially consumed file like objects.'
def test_super_len_correctly_calculates_len_of_partially_read_file(self):
    s = StringIO.StringIO()
    s.write('foobarbogus')
    assert super_len(s) == 0
'If tell() raises errors, assume the cursor is at position zero.'
@pytest.mark.parametrize('error', [IOError, OSError])
def test_super_len_handles_files_raising_weird_errors_in_tell(self, error):
    class BoomFile(object):
        def __len__(self):
            return 5

        def tell(self):
            raise error()

    assert super_len(BoomFile()) == 0
'Ensure that if tell gives an IOError super_len doesn\'t fail'
@pytest.mark.parametrize('error', [IOError, OSError])
def test_super_len_tell_ioerror(self, error):
    class NoLenBoomFile(object):
        def tell(self):
            raise error()

        def seek(self, offset, whence):
            pass

    assert super_len(NoLenBoomFile()) == 0
'Ensure that objects without any length methods default to 0'
def test_super_len_with_no_matches(self):
    assert super_len(object()) == 0
'messages are sent and received properly'
def test_basic(self):
    question = 'success?'
    answer = 'yeah, success'

    def handler(sock):
        text = sock.recv(1000)
        assert text == question
        sock.sendall(answer)

    with Server(handler) as (host, port):
        sock = socket.socket()
        sock.connect((host, port))
        sock.sendall(question)
        text = sock.recv(1000)
        assert text == answer
        sock.close()
'the server closes when leaving the context manager'
def test_server_closes(self):
    with Server.basic_response_server() as (host, port):
        sock = socket.socket()
        sock.connect((host, port))
        sock.close()
    with pytest.raises(socket.error):
        new_sock = socket.socket()
        new_sock.connect((host, port))
'the text_response_server sends the given text'
def test_text_response(self):
    server = Server.text_response_server(
        'HTTP/1.1 200 OK\r\n'
        'Content-Length: 6\r\n'
        '\r\nroflol'
    )
    with server as (host, port):
        r = requests.get('http://{0}:{1}'.format(host, port))
        assert r.status_code == 200
        assert r.text == u'roflol'
        assert r.headers['Content-Length'] == '6'
'the basic response server returns an empty http response'
def test_basic_response(self):
    with Server.basic_response_server() as (host, port):
        r = requests.get('http://{0}:{1}'.format(host, port))
        assert r.status_code == 200
        assert r.text == u''
        assert r.headers['Content-Length'] == '0'
'the server waits for the block_server event to be set before closing'
def test_basic_waiting_server(self):
    block_server = threading.Event()
    with Server.basic_response_server(wait_to_close_event=block_server) as (host, port):
        sock = socket.socket()
        sock.connect((host, port))
        sock.sendall('send something')
        time.sleep(2.5)
        sock.sendall('still alive')
        block_server.set()
'multiple requests can be served'
def test_multiple_requests(self):
    requests_to_handle = 5
    server = Server.basic_response_server(requests_to_handle=requests_to_handle)
    with server as (host, port):
        server_url = 'http://{0}:{1}'.format(host, port)
        for _ in range(requests_to_handle):
            r = requests.get(server_url)
            assert r.status_code == 200
        with pytest.raises(requests.exceptions.ConnectionError):
            r = requests.get(server_url)
'can check the requests content'
@pytest.mark.skip(reason='this fails non-deterministically under pytest-xdist')
def test_request_recovery(self):
    server = Server.basic_response_server(requests_to_handle=2)
    first_request = 'put your hands up in the air'
    second_request = 'put your hand down in the floor'
    with server as address:
        sock1 = socket.socket()
        sock2 = socket.socket()
        sock1.connect(address)
        sock1.sendall(first_request)
        sock1.close()
        sock2.connect(address)
        sock2.sendall(second_request)
        sock2.close()
    assert server.handler_results[0] == first_request
    assert server.handler_results[1] == second_request
'the basic response handler times out when receiving requests'
def test_requests_after_timeout_are_not_received(self):
    server = Server.basic_response_server(request_timeout=1)
    with server as address:
        sock = socket.socket()
        sock.connect(address)
        time.sleep(1.5)
        sock.sendall('hehehe, not received')
        sock.close()
    assert server.handler_results[0] == ''
'a bigger timeout can be specified'
def test_request_recovery_with_bigger_timeout(self):
    server = Server.basic_response_server(request_timeout=3)
    data = 'bananadine'
    with server as address:
        sock = socket.socket()
        sock.connect(address)
        time.sleep(1.5)
        sock.sendall(data)
        sock.close()
    assert server.handler_results[0] == data
'the server thread exits even if an exception exits the context manager'
def test_server_finishes_on_error(self):
    server = Server.basic_response_server()
    with pytest.raises(Exception):
        with server:
            raise Exception()
    assert len(server.handler_results) == 0
'the server thread exits even if there are no connections'
def test_server_finishes_when_no_connections(self):
    server = Server.basic_response_server()
    with server:
        pass
    assert len(server.handler_results) == 0
'refresh_queries() launches an execution task for each query returned from Query.outdated_queries().'
def test_enqueues_outdated_queries(self):
    query1 = self.factory.create_query()
    query2 = self.factory.create_query(query_text='select 42;',
                                       data_source=self.factory.create_data_source())
    oq = staticmethod(lambda: [query1, query2])
    with patch('redash.tasks.queries.enqueue_query') as add_job_mock:
        with patch.object(Query, 'outdated_queries', oq):
            refresh_queries()
            self.assertEqual(add_job_mock.call_count, 2)
            add_job_mock.assert_has_calls([
                call(query1.query_text, query1.data_source, query1.user_id,
                     scheduled_query=query1, metadata=ANY),
                call(query2.query_text, query2.data_source, query2.user_id,
                     scheduled_query=query2, metadata=ANY),
            ], any_order=True)
'refresh_queries() does not launch execution tasks for queries whose data source is paused.'
def test_doesnt_enqueue_outdated_queries_for_paused_data_source(self):
    query = self.factory.create_query()
    oq = staticmethod(lambda: [query])
    query.data_source.pause()
    with patch.object(Query, 'outdated_queries', oq):
        with patch('redash.tasks.queries.enqueue_query') as add_job_mock:
            refresh_queries()
            add_job_mock.assert_not_called()
        query.data_source.resume()
        with patch('redash.tasks.queries.enqueue_query') as add_job_mock:
            refresh_queries()
            add_job_mock.assert_called_with(query.query_text, query.data_source,
                                            query.user_id, scheduled_query=query,
                                            metadata=ANY)
'``execute_query`` invokes the query runner and stores a query result.'
def test_success(self):
    cm = mock.patch('celery.app.task.Context.delivery_info', {'routing_key': 'test'})
    with cm:
        with mock.patch.object(PostgreSQL, 'run_query') as qr:
            qr.return_value = ([1, 2], None)
            result_id = execute_query('SELECT 1, 2', self.factory.data_source.id, {})
            self.assertEqual(1, qr.call_count)
            result = models.QueryResult.query.get(result_id)
            self.assertEqual(result.data, '{1,2}')
'Scheduled queries remember their latest results.'
def test_success_scheduled(self):
    cm = mock.patch('celery.app.task.Context.delivery_info', {'routing_key': 'test'})
    q = self.factory.create_query(query_text='SELECT 1, 2', schedule=300)
    with cm:
        with mock.patch.object(PostgreSQL, 'run_query') as qr:
            qr.return_value = ([1, 2], None)
            result_id = execute_query('SELECT 1, 2', self.factory.data_source.id, {},
                                      scheduled_query_id=q.id)
            q = models.Query.get_by_id(q.id)
            self.assertEqual(q.schedule_failures, 0)
            result = models.QueryResult.query.get(result_id)
            self.assertEqual(q.latest_query_data, result)
'Scheduled queries that fail have their failure recorded.'
def test_failure_scheduled(self):
    cm = mock.patch('celery.app.task.Context.delivery_info', {'routing_key': 'test'})
    q = self.factory.create_query(query_text='SELECT 1, 2', schedule=300)
    with cm:
        with mock.patch.object(PostgreSQL, 'run_query') as qr:
            qr.exception = ValueError('broken')
            execute_query('SELECT 1, 2', self.factory.data_source.id, {},
                          scheduled_query_id=q.id)
            self.assertEqual(q.schedule_failures, 1)
            execute_query('SELECT 1, 2', self.factory.data_source.id, {},
                          scheduled_query_id=q.id)
            q = models.Query.get_by_id(q.id)
            self.assertEqual(q.schedule_failures, 2)
'Query execution success resets the failure counter.'
def test_success_after_failure(self):
    cm = mock.patch('celery.app.task.Context.delivery_info', {'routing_key': 'test'})
    q = self.factory.create_query(query_text='SELECT 1, 2', schedule=300)
    with cm:
        with mock.patch.object(PostgreSQL, 'run_query') as qr:
            qr.exception = ValueError('broken')
            execute_query('SELECT 1, 2', self.factory.data_source.id, {},
                          scheduled_query_id=q.id)
            q = models.Query.get_by_id(q.id)
            self.assertEqual(q.schedule_failures, 1)
    with cm:
        with mock.patch.object(PostgreSQL, 'run_query') as qr:
            qr.return_value = ([1, 2], None)
            execute_query('SELECT 1, 2', self.factory.data_source.id, {},
                          scheduled_query_id=q.id)
            q = models.Query.get_by_id(q.id)
            self.assertEqual(q.schedule_failures, 0)
'Only one query per data source with the same text will be reported by Query.outdated_queries().'
def test_enqueues_query_only_once(self):
    query = self.factory.create_query(schedule='60')
    query2 = self.factory.create_query(schedule='60', query_text=query.query_text,
                                       query_hash=query.query_hash)
    retrieved_at = utcnow() - datetime.timedelta(minutes=10)
    query_result = self.factory.create_query_result(retrieved_at=retrieved_at,
                                                    query_text=query.query_text,
                                                    query_hash=query.query_hash)
    query.latest_query_data = query_result
    query2.latest_query_data = query_result
    self.assertEqual(list(models.Query.outdated_queries()), [query2])
'Queries from different data sources will be reported by Query.outdated_queries() even if they have the same query text.'
def test_enqueues_query_with_correct_data_source(self):
    query = self.factory.create_query(schedule='60',
                                      data_source=self.factory.create_data_source())
    query2 = self.factory.create_query(schedule='60', query_text=query.query_text,
                                       query_hash=query.query_hash)
    retrieved_at = utcnow() - datetime.timedelta(minutes=10)
    query_result = self.factory.create_query_result(retrieved_at=retrieved_at,
                                                    query_text=query.query_text,
                                                    query_hash=query.query_hash)
    query.latest_query_data = query_result
    query2.latest_query_data = query_result
    self.assertEqual(list(models.Query.outdated_queries()), [query2, query])
'If multiple queries with the same text exist, only ones that are scheduled to be refreshed are reported by Query.outdated_queries().'
def test_enqueues_only_for_relevant_data_source(self):
    query = self.factory.create_query(schedule='60')
    query2 = self.factory.create_query(schedule='3600', query_text=query.query_text,
                                       query_hash=query.query_hash)
    retrieved_at = utcnow() - datetime.timedelta(minutes=10)
    query_result = self.factory.create_query_result(retrieved_at=retrieved_at,
                                                    query_text=query.query_text,
                                                    query_hash=query.query_hash)
    query.latest_query_data = query_result
    query2.latest_query_data = query_result
    self.assertEqual(list(models.Query.outdated_queries()), [query])
'Execution failures recorded for a query result in exponential backoff for scheduling future execution.'
def test_failure_extends_schedule(self):
    query = self.factory.create_query(schedule='60', schedule_failures=4)
    retrieved_at = utcnow() - datetime.timedelta(minutes=16)
    query_result = self.factory.create_query_result(retrieved_at=retrieved_at,
                                                    query_text=query.query_text,
                                                    query_hash=query.query_hash)
    query.latest_query_data = query_result
    self.assertEqual(list(models.Query.outdated_queries()), [])
    query_result.retrieved_at = utcnow() - datetime.timedelta(minutes=17)
    self.assertEqual(list(models.Query.outdated_queries()), [query])
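A worked instance of the backoff arithmetic these 16/17-minute checks imply, assuming the retry delay doubles with each recorded failure (the formula is inferred from the test's numbers, not quoted from the source):

schedule = 60                             # seconds
schedule_failures = 4
backoff = (2 ** schedule_failures) * 60   # 960 s if each failure doubles a one-minute delay
next_due = schedule + backoff             # 1020 s = 17 min after the last retrieval,
                                          # so a 16-minute-old result is fresh and a 17-minute-old one is outdated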
'Custom hook that controls the way objects/lists/tuples/dicts behave in RestrictedPython'
@staticmethod
def custom_write(obj):
    return obj
'Helper function to add columns inside a Python script running in Redash in an easier way. Parameters: :result dict: The result dict :column_name string: Name of the column, which should consist of lowercase Latin letters or underscores. :friendly_name string: Name of the column for display :column_type string: Type of the column. Check supported data types for details.'
@staticmethod
def add_result_column(result, column_name, friendly_name, column_type):
    if column_type not in SUPPORTED_COLUMN_TYPES:
        raise Exception("'{0}' is not a supported column type".format(column_type))
    if 'columns' not in result:
        result['columns'] = []
    result['columns'].append({
        'name': column_name,
        'friendly_name': friendly_name,
        'type': column_type,
    })
'Helper function to add one row to results set. Parameters: :result dict: The result dict :values dict: One row of result in dict. The key should be one of the column names. The value is the value of the column in this row.'
@staticmethod
def add_result_row(result, values):
    if 'rows' not in result:
        result['rows'] = []
    result['rows'].append(values)
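A usage sketch combining the two helpers above to assemble a result dict (column names and row values are illustrative):

result = {}
add_result_column(result, 'user_id', 'User ID', 'integer')
add_result_column(result, 'email', 'Email', 'string')
add_result_row(result, {'user_id': 42, 'email': '[email protected]'})
# result == {'columns': [{'name': 'user_id', ...}, {'name': 'email', ...}],
#            'rows': [{'user_id': 42, 'email': '[email protected]'}]}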
'Run query from specific data source. Parameters: :data_source_name_or_id string|integer: Name or ID of the data source :query string: Query to run'
@staticmethod
def execute_query(data_source_name_or_id, query):
    try:
        if type(data_source_name_or_id) == int:
            data_source = models.DataSource.get_by_id(data_source_name_or_id)
        else:
            data_source = models.DataSource.get_by_name(data_source_name_or_id)
    except models.NoResultFound:
        raise Exception('Wrong data source name/id: %s.' % data_source_name_or_id)
    (data, error) = data_source.query_runner.run_query(query, None)
    if error is not None:
        raise Exception(error)
    return json.loads(data)
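How this looks from inside a Redash Python data source script (the data source name and SQL are illustrative):

rows = execute_query('my_postgres', 'SELECT id, email FROM users LIMIT 10')
# -> the runner's JSON payload parsed into a dict: {'columns': [...], 'rows': [...]}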
'Get schema from specific data source. :param data_source_name_or_id: string|integer: Name or ID of the data source :return:'
@staticmethod
def get_source_schema(data_source_name_or_id):
    try:
        if type(data_source_name_or_id) == int:
            data_source = models.DataSource.get_by_id(data_source_name_or_id)
        else:
            data_source = models.DataSource.get_by_name(data_source_name_or_id)
    except models.NoResultFound:
        raise Exception('Wrong data source name/id: %s.' % data_source_name_or_id)
    schema = data_source.query_runner.get_schema()
    return schema
'Get result of an existing query. Parameters: :query_id integer: ID of existing query'
@staticmethod
def get_query_result(query_id):
    try:
        query = models.Query.get_by_id(query_id)
    except models.NoResultFound:
        raise Exception('Query id %s does not exist.' % query_id)
    if query.latest_query_data is None:
        raise Exception('Query does not have results yet.')
    if query.latest_query_data.data is None:
        raise Exception('Query does not have results yet.')
    return json.loads(query.latest_query_data.data)
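And the companion call for reusing an existing query's cached output (the query ID is illustrative):

previous = get_query_result(123)
for row in previous['rows']:
    print(row)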
'Execute a query (or retrieve recent results). :qparam string query: The query text to execute :qparam number query_id: The query object to update with the result (optional) :qparam number max_age: If query results less than `max_age` seconds old are available, return them, otherwise execute the query; if omitted, always execute :qparam number data_source_id: ID of data source to query'
@require_permission('execute_query')
def post(self):
    params = request.get_json(force=True)
    parameter_values = collect_parameters_from_request(request.args)
    query = params['query']
    max_age = int(params.get('max_age', -1))
    query_id = params.get('query_id', 'adhoc')
    data_source = models.DataSource.get_by_id_and_org(params.get('data_source_id'),
                                                      self.current_org)
    if not has_access(data_source.groups, self.current_user, not_view_only):
        return ({'job': {'status': 4,
                         'error': 'You do not have permission to run queries with this data source.'}},
                403)
    self.record_event({
        'action': 'execute_query',
        'timestamp': int(time.time()),
        'object_id': data_source.id,
        'object_type': 'data_source',
        'query': query,
    })
    return run_query(data_source, parameter_values, query, query_id, max_age)
'Retrieve query results. :param number query_id: The ID of the query whose results should be fetched :param number query_result_id: the ID of the query result to fetch :param string filetype: Format to return. One of \'json\', \'xlsx\', or \'csv\'. Defaults to \'json\'. :<json number id: Query result ID :<json string query: Query that produced this result :<json string query_hash: Hash code for query text :<json object data: Query output :<json number data_source_id: ID of data source that produced this result :<json number runtime: Length of execution time in seconds :<json string retrieved_at: Query retrieval date/time, in ISO format'
@require_permission('view_query')
def get(self, query_id=None, query_result_id=None, filetype='json'):
    should_cache = query_result_id is not None
    parameter_values = collect_parameters_from_request(request.args)
    max_age = int(request.args.get('maxAge', 0))
    query_result = None
    if query_result_id:
        query_result = get_object_or_404(models.QueryResult.get_by_id_and_org,
                                         query_result_id, self.current_org)
    elif query_id is not None:
        query = get_object_or_404(models.Query.get_by_id_and_org,
                                  query_id, self.current_org)
        if query is not None:
            if settings.ALLOW_PARAMETERS_IN_EMBEDS and parameter_values:
                query_result = run_query_sync(query.data_source, parameter_values,
                                              query.to_dict()['query'],
                                              max_age=max_age)
            elif query.latest_query_data_id is not None:
                query_result = get_object_or_404(models.QueryResult.get_by_id_and_org,
                                                 query.latest_query_data_id,
                                                 self.current_org)
    if query_result:
        require_access(query_result.data_source.groups, self.current_user, view_only)
        if isinstance(self.current_user, models.ApiUser):
            event = {
                'user_id': None,
                'org_id': self.current_org.id,
                'action': 'api_get',
                'timestamp': int(time.time()),
                'api_key': self.current_user.name,
                'file_type': filetype,
                'user_agent': request.user_agent.string,
                'ip': request.remote_addr,
            }
            if query_id:
                event['object_type'] = 'query'
                event['object_id'] = query_id
            else:
                event['object_type'] = 'query_result'
                event['object_id'] = query_result_id
            record_event.delay(event)
        if filetype == 'json':
            response = self.make_json_response(query_result)
        elif filetype == 'xlsx':
            response = self.make_excel_response(query_result)
        else:
            response = self.make_csv_response(query_result)
        if len(settings.ACCESS_CONTROL_ALLOW_ORIGIN) > 0:
            self.add_cors_headers(response.headers)
        if should_cache:
            response.headers.add_header('Cache-Control', 'max-age=%d' % ONE_YEAR)
        return response
    else:
        abort(404, message='No cached result found for this query.')
'Retrieve info about a running query job.'
def get(self, job_id):
    job = QueryTask(job_id=job_id)
    return {'job': job.to_dict()}
'Cancel a query job in progress.'
def delete(self, job_id):
    job = QueryTask(job_id=job_id)
    job.cancel()
'Search query text, titles, and descriptions. :qparam string q: Search term Responds with a list of :ref:`query <query-response-label>` objects.'
@require_permission('view_query')
def get(self):
    term = request.args.get('q', '')
    include_drafts = request.args.get('include_drafts') is not None
    return [q.to_dict(with_last_modified_by=False)
            for q in models.Query.search(term, self.current_user.group_ids,
                                         include_drafts=include_drafts)]
'Retrieve up to 20 queries modified in the last 7 days. Responds with a list of :ref:`query <query-response-label>` objects.'
@require_permission('view_query')
def get(self):
    if settings.FEATURE_DUMB_RECENTS:
        results = models.Query.by_user(self.current_user).order_by(
            models.Query.updated_at.desc()).limit(10)
        queries = [q.to_dict(with_last_modified_by=False, with_user=False)
                   for q in results]
    else:
        queries = models.Query.recent(self.current_user.group_ids,
                                      self.current_user.id)
        recent = [d.to_dict(with_last_modified_by=False, with_user=False)
                  for d in queries]
        global_recent = []
        if len(recent) < 10:
            global_recent = [d.to_dict(with_last_modified_by=False, with_user=False)
                             for d in models.Query.recent(self.current_user.group_ids)]
        queries = take(20, distinct(chain(recent, global_recent),
                                    key=lambda d: d['id']))
    return queries
'Create a new query. :<json number data_source_id: The ID of the data source this query will run on :<json string query: Query text :<json string name: :<json string description: :<json string schedule: Schedule interval, in seconds, for repeated execution of this query :<json object options: Query options .. _query-response-label: :>json number id: Query ID :>json number latest_query_data_id: ID for latest output data from this query :>json string name: :>json string description: :>json string query: Query text :>json string query_hash: Hash of query text :>json string schedule: Schedule interval, in seconds, for repeated execution of this query :>json string api_key: Key for public access to this query\'s results. :>json boolean is_archived: Whether this query is displayed in indexes and search results or not. :>json boolean is_draft: Whether this query is a draft or not :>json string updated_at: Time of last modification, in ISO format :>json string created_at: Time of creation, in ISO format :>json number data_source_id: ID of the data source this query will run on :>json object options: Query options :>json number version: Revision version (for update conflict avoidance) :>json number user_id: ID of query creator :>json number last_modified_by_id: ID of user who last modified this query :>json string retrieved_at: Time when query results were last retrieved, in ISO format (may be null) :>json number runtime: Runtime of last query execution, in seconds (may be null)'
@require_permission('create_query')
def post(self):
    query_def = request.get_json(force=True)
    data_source = models.DataSource.get_by_id_and_org(query_def.pop('data_source_id'),
                                                      self.current_org)
    require_access(data_source.groups, self.current_user, not_view_only)
    for field in ['id', 'created_at', 'api_key', 'visualizations',
                  'latest_query_data', 'last_modified_by']:
        query_def.pop(field, None)
    query_def['query_text'] = query_def.pop('query')
    query_def['user'] = self.current_user
    query_def['data_source'] = data_source
    query_def['org'] = self.current_org
    query_def['is_draft'] = True
    query = models.Query.create(**query_def)
    models.db.session.add(query)
    models.db.session.commit()
    self.record_event({
        'action': 'create',
        'object_id': query.id,
        'object_type': 'query',
    })
    return query.to_dict()
'Retrieve a list of queries. :qparam number page_size: Number of queries to return :qparam number page: Page number to retrieve Responds with an array of :ref:`query <query-response-label>` objects.'
@require_permission('view_query')
def get(self):
    results = models.Query.all_queries(self.current_user.group_ids,
                                       self.current_user.id)
    page = request.args.get('page', 1, type=int)
    page_size = request.args.get('page_size', 25, type=int)
    return paginate(results, page, page_size,
                    lambda q: q.to_dict(with_stats=True, with_last_modified_by=False))
'Retrieve a list of queries created by the current user. :qparam number page_size: Number of queries to return :qparam number page: Page number to retrieve Responds with an array of :ref:`query <query-response-label>` objects.'
@require_permission('view_query')
def get(self):
    drafts = request.args.get('drafts') is not None
    results = models.Query.by_user(self.current_user)
    page = request.args.get('page', 1, type=int)
    page_size = request.args.get('page_size', 25, type=int)
    return paginate(results, page, page_size,
                    lambda q: q.to_dict(with_stats=True, with_last_modified_by=False))
'Modify a query. :param query_id: ID of query to update :<json number data_source_id: The ID of the data source this query will run on :<json string query: Query text :<json string name: :<json string description: :<json string schedule: Schedule interval, in seconds, for repeated execution of this query :<json object options: Query options Responds with the updated :ref:`query <query-response-label>` object.'
@require_permission('edit_query')
def post(self, query_id):
    query = get_object_or_404(models.Query.get_by_id_and_org, query_id,
                              self.current_org)
    query_def = request.get_json(force=True)
    require_object_modify_permission(query, self.current_user)
    for field in ['id', 'created_at', 'api_key', 'visualizations',
                  'latest_query_data', 'user', 'last_modified_by', 'org']:
        query_def.pop(field, None)
    if 'query' in query_def:
        query_def['query_text'] = query_def.pop('query')
    query_def['last_modified_by'] = self.current_user
    query_def['changed_by'] = self.current_user
    if 'version' in query_def and query_def['version'] != query.version:
        abort(409)
    try:
        self.update_model(query, query_def)
        models.db.session.commit()
    except StaleDataError:
        abort(409)
    return query.to_dict(with_visualizations=True)
'Retrieve a query. :param query_id: ID of query to fetch Responds with the :ref:`query <query-response-label>` contents.'
@require_permission('view_query')
def get(self, query_id):
    q = get_object_or_404(models.Query.get_by_id_and_org, query_id,
                          self.current_org)
    require_access(q.groups, self.current_user, view_only)
    result = q.to_dict(with_visualizations=True)
    result['can_edit'] = can_modify(q, self.current_user)
    return result
'Archives a query. :param query_id: ID of query to archive'
def delete(self, query_id):
    query = get_object_or_404(models.Query.get_by_id_and_org, query_id,
                              self.current_org)
    require_admin_or_owner(query.user_id)
    query.archive(self.current_user)
    models.db.session.commit()
'Creates a new query, copying the query text from an existing one. :param query_id: ID of query to fork Responds with created :ref:`query <query-response-label>` object.'
@require_permission('edit_query')
def post(self, query_id):
    query = get_object_or_404(models.Query.get_by_id_and_org, query_id,
                              self.current_org)
    forked_query = query.fork(self.current_user)
    models.db.session.commit()
    return forked_query.to_dict(with_visualizations=True)
'Execute a query, updating the query object with the results. :param query_id: ID of query to execute Responds with query task details.'
def post(self, query_id):
    query = get_object_or_404(models.Query.get_by_id_and_org, query_id,
                              self.current_org)
    require_access(query.groups, self.current_user, not_view_only)
    parameter_values = collect_parameters_from_request(request.args)
    return run_query(query.data_source, parameter_values, query.query_text, query.id)
'Add a widget to a dashboard. :<json number dashboard_id: The ID for the dashboard being added to :<json visualization_id: The ID of the visualization to put in this widget :<json object options: Widget options :<json string text: Text box contents :<json number width: Width for widget display :>json object widget: The created widget :>json array layout: The new layout of the dashboard this widget was added to :>json boolean new_row: Whether this widget was added on a new row or not :>json number version: The revision number of the dashboard'
@require_permission('edit_dashboard')
def post(self):
    widget_properties = request.get_json(force=True)
    dashboard = models.Dashboard.get_by_id_and_org(widget_properties.pop('dashboard_id'),
                                                   self.current_org)
    require_object_modify_permission(dashboard, self.current_user)
    widget_properties['options'] = json.dumps(widget_properties['options'])
    widget_properties.pop('id', None)
    widget_properties['dashboard'] = dashboard
    visualization_id = widget_properties.pop('visualization_id')
    if visualization_id:
        visualization = models.Visualization.get_by_id_and_org(visualization_id,
                                                               self.current_org)
        require_access(visualization.query_rel.groups, self.current_user, view_only)
    else:
        visualization = None
    widget_properties['visualization'] = visualization
    widget = models.Widget(**widget_properties)
    models.db.session.add(widget)
    models.db.session.commit()
    layout = json.loads(widget.dashboard.layout)
    new_row = True
    if len(layout) == 0 or widget.width == 2:
        layout.append([widget.id])
    elif len(layout[-1]) == 1:
        neighbour_widget = models.Widget.query.get(layout[-1][0])
        if neighbour_widget.width == 1:
            layout[-1].append(widget.id)
            new_row = False
        else:
            layout.append([widget.id])
    else:
        layout.append([widget.id])
    widget.dashboard.layout = json.dumps(layout)
    models.db.session.add(widget.dashboard)
    models.db.session.commit()
    return {'widget': widget.to_dict(), 'layout': layout,
            'new_row': new_row, 'version': dashboard.version}
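A small worked example of the layout bookkeeping above (widget IDs are illustrative):

layout = [[1, 2], [3]]  # two rows: widgets 1 and 2 side by side, widget 3 alone
# adding a width-1 widget 4 fills the half-empty last row -> [[1, 2], [3, 4]], new_row=False
# adding a width-2 widget 5 instead starts its own row    -> [[1, 2], [3], [5]], new_row=True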
'Updates a widget in a dashboard. This method currently handles Text Box widgets only. :param number widget_id: The ID of the widget to modify :<json string text: The new contents of the text box'
@require_permission('edit_dashboard')
def post(self, widget_id):
    widget = models.Widget.get_by_id_and_org(widget_id, self.current_org)
    require_object_modify_permission(widget.dashboard, self.current_user)
    widget_properties = request.get_json(force=True)
    widget.text = widget_properties['text']
    models.db.session.commit()
    return widget.to_dict()
'Remove a widget from a dashboard. :param number widget_id: ID of widget to remove :>json array layout: New layout of dashboard this widget was removed from :>json number version: Revision number of dashboard'
@require_permission('edit_dashboard')
def delete(self, widget_id):
    widget = models.Widget.get_by_id_and_org(widget_id, self.current_org)
    require_object_modify_permission(widget.dashboard, self.current_user)
    widget.delete()
    models.db.session.commit()
    return {'layout': widget.dashboard.layout, 'version': widget.dashboard.version}
'Lists dashboards modified in the last 7 days.'
@require_permission('list_dashboards')
def get(self):
    if settings.FEATURE_DUMB_RECENTS:
        dashboards = models.Dashboard.all(self.current_org,
                                          self.current_user.group_ids,
                                          self.current_user.id).order_by(
            models.Dashboard.updated_at.desc()).limit(10)
        dashboards = [d.to_dict() for d in dashboards]
    else:
        recent = [d.to_dict() for d in models.Dashboard.recent(
            self.current_org, self.current_user.group_ids,
            self.current_user.id, for_user=True)]
        global_recent = []
        if len(recent) < 10:
            global_recent = [d.to_dict() for d in models.Dashboard.recent(
                self.current_org, self.current_user.group_ids,
                self.current_user.id)]
        dashboards = take(20, distinct(chain(recent, global_recent),
                                       key=lambda d: d['id']))
    return dashboards